diff --git a/src/libfuzzer/fuzzer.rc b/src/libfuzzer/fuzzer.rc index 7003b53cbb5f1..c16a7cf8d3ebe 100644 --- a/src/libfuzzer/fuzzer.rc +++ b/src/libfuzzer/fuzzer.rc @@ -134,7 +134,7 @@ pub pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool { pub fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool { // Restrictions happen to be the same. - safe_to_replace_ty(t.node, tm) + safe_to_replace_ty(&t.node, tm) } // Not type-parameterized: https://github.com/mozilla/rust/issues/898 (FIXED) @@ -175,8 +175,8 @@ pub fn steal(crate: ast::crate, tm: test_mode) -> StolenStuff { } -pub fn safe_to_replace_expr(e: ast::expr_, _tm: test_mode) -> bool { - match e { +pub fn safe_to_replace_expr(e: &ast::expr_, _tm: test_mode) -> bool { + match *e { // https://github.com/mozilla/rust/issues/652 ast::expr_if(*) => { false } ast::expr_block(_) => { false } @@ -188,8 +188,8 @@ pub fn safe_to_replace_expr(e: ast::expr_, _tm: test_mode) -> bool { } } -pub fn safe_to_replace_ty(t: ast::ty_, _tm: test_mode) -> bool { - match t { +pub fn safe_to_replace_ty(t: &ast::ty_, _tm: test_mode) -> bool { + match *t { ast::ty_infer => { false } // always implicit, always top level ast::ty_bot => { false } // in source, can only appear // as the out type of a function @@ -204,7 +204,7 @@ pub fn replace_expr_in_crate(crate: ast::crate, i: uint, ast::crate { let j: @mut uint = @mut 0u; fn fold_expr_rep(j_: @mut uint, i_: uint, newexpr_: ast::expr_, - original: ast::expr_, fld: fold::ast_fold, + original: &ast::expr_, fld: fold::ast_fold, tm_: test_mode) -> ast::expr_ { *j_ += 1u; @@ -221,7 +221,7 @@ pub fn replace_expr_in_crate(crate: ast::crate, i: uint, .. *fold::default_ast_fold() }; let af = fold::make_fold(afp); - let crate2: @ast::crate = @af.fold_crate(crate); + let crate2: @ast::crate = @af.fold_crate(&crate); *crate2 } @@ -231,7 +231,7 @@ pub fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty, tm: test_mode) -> ast::crate { let j: @mut uint = @mut 0u; fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_, - original: ast::ty_, fld: fold::ast_fold, + original: &ast::ty_, fld: fold::ast_fold, tm_: test_mode) -> ast::ty_ { *j_ += 1u; @@ -244,7 +244,7 @@ pub fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty, .. 
*fold::default_ast_fold() }; let af = fold::make_fold(afp); - let crate2: @ast::crate = @af.fold_crate(crate); + let crate2: @ast::crate = @af.fold_crate(&crate); *crate2 } diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 45bbe5d091e06..61bb03dd7f902 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -396,7 +396,7 @@ pub fn pretty_print_input(sess: Session, +cfg: ast::crate_cfg, input: input, pprust::node_block(s, ref blk) => { pp::space(s.s); pprust::synth_comment( - s, ~"block " + int::to_str((*blk).node.id)); + s, ~"block " + int::to_str(blk.node.id)); } pprust::node_expr(s, expr) => { pp::space(s.s); diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index bd35bf50cefc0..94dbfb51eb822 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -358,7 +358,7 @@ pub mod test { pub fn make_crate_type_attr(+t: ~str) -> ast::attribute { codemap::respan(codemap::dummy_sp(), ast::attribute_ { style: ast::attr_outer, - value: codemap::respan(codemap::dummy_sp(), + value: @codemap::respan(codemap::dummy_sp(), ast::meta_name_value( @~"crate_type", codemap::respan(codemap::dummy_sp(), diff --git a/src/librustc/front/config.rs b/src/librustc/front/config.rs index afb73a6e7c35c..faa1ad9854af7 100644 --- a/src/librustc/front/config.rs +++ b/src/librustc/front/config.rs @@ -45,7 +45,7 @@ pub fn strip_items(crate: @ast::crate, in_cfg: in_cfg_pred) .. *fold::default_ast_fold()}; let fold = fold::make_fold(precursor); - let res = @fold.fold_crate(*crate); + let res = @fold.fold_crate(&*crate); return res; } @@ -63,7 +63,7 @@ fn filter_view_item(cx: @Context, &&view_item: @ast::view_item } } -fn fold_mod(cx: @Context, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod { +fn fold_mod(cx: @Context, m: &ast::_mod, fld: fold::ast_fold) -> ast::_mod { let filtered_items = m.items.filter_mapped(|a| filter_item(cx, *a)); let filtered_view_items = @@ -83,7 +83,7 @@ fn filter_foreign_item(cx: @Context, &&item: @ast::foreign_item) -> fn fold_foreign_mod( cx: @Context, - nm: ast::foreign_mod, + nm: &ast::foreign_mod, fld: fold::ast_fold ) -> ast::foreign_mod { let filtered_items = @@ -98,21 +98,21 @@ fn fold_foreign_mod( } } -fn fold_item_underscore(cx: @Context, +item: ast::item_, +fn fold_item_underscore(cx: @Context, item: &ast::item_, fld: fold::ast_fold) -> ast::item_ { - let item = match item { - ast::item_impl(a, b, c, methods) => { + let item = match *item { + ast::item_impl(ref a, b, c, ref methods) => { let methods = methods.filtered(|m| method_in_cfg(cx, *m) ); - ast::item_impl(a, b, c, methods) + ast::item_impl(/*bad*/ copy *a, b, c, methods) } ast::item_trait(ref a, ref b, ref methods) => { let methods = methods.filtered(|m| trait_method_in_cfg(cx, m) ); ast::item_trait(/*bad*/copy *a, /*bad*/copy *b, methods) } - item => item + ref item => /*bad*/ copy *item }; - fold::noop_fold_item_underscore(item, fld) + fold::noop_fold_item_underscore(&item, fld) } fn filter_stmt(cx: @Context, &&stmt: @ast::stmt) -> @@ -134,7 +134,7 @@ fn filter_stmt(cx: @Context, &&stmt: @ast::stmt) -> fn fold_block( cx: @Context, - b: ast::blk_, + b: &ast::blk_, fld: fold::ast_fold ) -> ast::blk_ { let filtered_stmts = diff --git a/src/librustc/front/core_inject.rs b/src/librustc/front/core_inject.rs index 51b25854bc751..b7df99795924b 100644 --- a/src/librustc/front/core_inject.rs +++ b/src/librustc/front/core_inject.rs @@ -50,7 +50,7 @@ fn inject_libcore_ref(sess: Session, attrs: ~[ spanned(ast::attribute_ { style: 
ast::attr_inner, - value: spanned(ast::meta_name_value( + value: @spanned(ast::meta_name_value( @~"vers", spanned(ast::lit_str(@CORE_VERSION.to_str())) )), @@ -66,10 +66,13 @@ fn inject_libcore_ref(sess: Session, view_items: vis, ../*bad*/copy crate.module }; - new_module = fld.fold_mod(new_module); + new_module = fld.fold_mod(&new_module); // FIXME #2543: Bad copy. - let new_crate = ast::crate_ { module: new_module, ..copy crate }; + let new_crate = ast::crate_ { + module: new_module, + ..copy *crate + }; (new_crate, span) }, fold_mod: |module, fld| { @@ -95,12 +98,15 @@ fn inject_libcore_ref(sess: Session, let vis = vec::append(~[vi2], module.view_items); // FIXME #2543: Bad copy. - let new_module = ast::_mod { view_items: vis, ..copy module }; - fold::noop_fold_mod(new_module, fld) + let new_module = ast::_mod { + view_items: vis, + ..copy *module + }; + fold::noop_fold_mod(&new_module, fld) }, ..*fold::default_ast_fold() }; let fold = fold::make_fold(precursor); - @fold.fold_crate(*crate) + @fold.fold_crate(crate) } diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs index 22333f236ed8e..94b49e9d266e4 100644 --- a/src/librustc/front/test.rs +++ b/src/librustc/front/test.rs @@ -91,7 +91,7 @@ fn generate_test_harness(sess: session::Session, fold_mod: |a,b| fold_mod(cx, a, b),.. *fold::default_ast_fold()}; let fold = fold::make_fold(precursor); - let res = @fold.fold_crate(*crate); + let res = @fold.fold_crate(&*crate); cx.ext_cx.bt_pop(); return res; } @@ -106,7 +106,7 @@ fn strip_test_functions(crate: @ast::crate) -> @ast::crate { } fn fold_mod(cx: @mut TestCtxt, - m: ast::_mod, + m: &ast::_mod, fld: fold::ast_fold) -> ast::_mod { // Remove any #[main] from the AST so it doesn't clash with @@ -125,19 +125,21 @@ fn fold_mod(cx: @mut TestCtxt, items: vec::map(m.items, |i| nomain(cx, *i)), }; - fold::noop_fold_mod(mod_nomain, fld) + fold::noop_fold_mod(&mod_nomain, fld) } fn fold_crate(cx: @mut TestCtxt, - c: ast::crate_, + c: &ast::crate_, fld: fold::ast_fold) -> ast::crate_ { let folded = fold::noop_fold_crate(c, fld); // Add a special __test module to the crate that will contain code // generated for the test harness - ast::crate_ { module: add_test_module(cx, /*bad*/copy folded.module), - .. folded } + ast::crate_ { + module: add_test_module(cx, &folded.module), + .. folded + } } @@ -238,11 +240,11 @@ fn should_fail(i: @ast::item) -> bool { vec::len(attr::find_attrs_by_name(i.attrs, ~"should_fail")) > 0u } -fn add_test_module(cx: &TestCtxt, +m: ast::_mod) -> ast::_mod { +fn add_test_module(cx: &TestCtxt, m: &ast::_mod) -> ast::_mod { let testmod = mk_test_module(cx); ast::_mod { items: vec::append_one(/*bad*/copy m.items, testmod), - .. m + .. 
/*bad*/ copy *m } } diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 5963d87806093..cbe2217c9fc9b 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -1009,7 +1009,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::attribute] { codemap::spanned { node: ast::attribute_ { style: ast::attr_outer, - value: /*bad*/copy *meta_item, + value: meta_item, is_sugared_doc: false, }, span: codemap::dummy_sp() diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index a950cd04d6771..7d2423c7c5d3b 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -326,7 +326,7 @@ fn encode_path(ecx: @EncodeContext, ebml_w: writer::Encoder, } fn encode_info_for_mod(ecx: @EncodeContext, ebml_w: writer::Encoder, - md: _mod, id: node_id, path: &[ast_map::path_elt], + md: &_mod, id: node_id, path: &[ast_map::path_elt], name: ident) { ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(id)); @@ -659,7 +659,7 @@ fn encode_info_for_item(ecx: @EncodeContext, ebml_w: writer::Encoder, } ebml_w.end_tag(); } - item_mod(m) => { + item_mod(ref m) => { add_to_index(); encode_info_for_mod(ecx, ebml_w, m, item.id, path, item.ident); } @@ -912,8 +912,8 @@ fn encode_info_for_item(ecx: @EncodeContext, ebml_w: writer::Encoder, // method info, we output static methods with type signatures as // written. Here, we output the *real* type signatures. I feel like // maybe we should only ever handle the real type signatures. - for vec::each((*ms)) |m| { - let ty_m = ast_util::trait_method_to_ty_method(*m); + for ms.each |m| { + let ty_m = ast_util::trait_method_to_ty_method(m); if ty_m.self_ty.node != ast::sty_static { loop; } index.push(entry { val: ty_m.id, pos: ebml_w.writer.tell() }); @@ -995,7 +995,7 @@ fn encode_info_for_items(ecx: @EncodeContext, ebml_w: writer::Encoder, let index = @mut ~[]; ebml_w.start_tag(tag_items_data); index.push(entry { val: crate_node_id, pos: ebml_w.writer.tell() }); - encode_info_for_mod(ecx, ebml_w, crate.node.module, + encode_info_for_mod(ecx, ebml_w, &crate.node.module, crate_node_id, ~[], syntax::parse::token::special_idents::invalid); visit::visit_crate(*crate, (), visit::mk_vt(@visit::Visitor { @@ -1088,7 +1088,7 @@ fn write_int(writer: io::Writer, &&n: int) { writer.write_be_u32(n as u32); } -fn encode_meta_item(ebml_w: writer::Encoder, mi: meta_item) { +fn encode_meta_item(ebml_w: writer::Encoder, mi: @meta_item) { match mi.node { meta_word(name) => { ebml_w.start_tag(tag_meta_item_word); @@ -1118,7 +1118,7 @@ fn encode_meta_item(ebml_w: writer::Encoder, mi: meta_item) { ebml_w.writer.write(str::to_bytes(*name)); ebml_w.end_tag(); for items.each |inner_item| { - encode_meta_item(ebml_w, **inner_item); + encode_meta_item(ebml_w, *inner_item); } ebml_w.end_tag(); } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 901e600587e26..d5cb2f8726d17 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -298,7 +298,7 @@ fn encode_ast(ebml_w: writer::Encoder, item: ast::inlined_item) { // nested items, as otherwise it would get confused when translating // inlined items. 
fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { - fn drop_nested_items(blk: ast::blk_, fld: fold::ast_fold) -> ast::blk_ { + fn drop_nested_items(blk: &ast::blk_, fld: fold::ast_fold) -> ast::blk_ { let stmts_sans_items = do blk.stmts.filtered |stmt| { match stmt.node { ast::stmt_expr(_, _) | ast::stmt_semi(_, _) | @@ -317,7 +317,7 @@ fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { id: blk.id, rules: blk.rules }; - fold::noop_fold_block(blk_sans_items, fld) + fold::noop_fold_block(&blk_sans_items, fld) } let fld = fold::make_fold(@fold::AstFoldFns { @@ -336,7 +336,7 @@ fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { ast::ii_foreign(fld.fold_foreign_item(i)) } ast::ii_dtor(ref dtor, nm, ref tps, parent_id) => { - let dtor_body = fld.fold_block((*dtor).node.body); + let dtor_body = fld.fold_block(&dtor.node.body); ast::ii_dtor( codemap::spanned { node: ast::struct_dtor_ { body: dtor_body, @@ -372,8 +372,8 @@ fn renumber_ast(xcx: @ExtendedDecodeContext, ii: ast::inlined_item) ast::ii_foreign(fld.fold_foreign_item(i)) } ast::ii_dtor(ref dtor, nm, ref generics, parent_id) => { - let dtor_body = fld.fold_block((*dtor).node.body); - let dtor_attrs = fld.fold_attributes(copy dtor.node.attrs); + let dtor_body = fld.fold_block(&dtor.node.body); + let dtor_attrs = fld.fold_attributes(/*bad*/copy (*dtor).node.attrs); let new_generics = fold::fold_generics(generics, fld); let dtor_id = fld.new_id((*dtor).node.id); let new_parent = xcx.tr_def_id(parent_id); diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index f1e52d00beed9..c3555e54519a3 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -579,9 +579,9 @@ pub impl CheckLoanCtxt { } } -fn check_loans_in_fn(fk: visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, +fn check_loans_in_fn(fk: &visit::fn_kind, + decl: &ast::fn_decl, + body: &ast::blk, sp: span, id: ast::node_id, &&self: @mut CheckLoanCtxt, @@ -590,7 +590,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, let fty = ty::node_id_to_type(self.tcx(), id); let declared_purity; - match fk { + match *fk { visit::fk_item_fn(*) | visit::fk_method(*) | visit::fk_dtor(*) => { declared_purity = ty::ty_fn_purity(fty); @@ -611,7 +611,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, do save_and_restore_managed(self.fn_args) { *self.declared_purity = declared_purity; - match fk { + match *fk { visit::fk_anon(*) | visit::fk_fn_block(*) if is_stack_closure => { // inherits the fn_args from enclosing ctxt @@ -753,7 +753,7 @@ fn check_loans_in_expr(expr: @ast::expr, visit::visit_expr(expr, self, vt); } -fn check_loans_in_block(blk: ast::blk, +fn check_loans_in_block(blk: &ast::blk, &&self: @mut CheckLoanCtxt, vt: visit::vt<@mut CheckLoanCtxt>) { do save_and_restore_managed(self.declared_purity) { diff --git a/src/librustc/middle/borrowck/gather_loans.rs b/src/librustc/middle/borrowck/gather_loans.rs index 3d24186f2c15f..ab343456ef463 100644 --- a/src/librustc/middle/borrowck/gather_loans.rs +++ b/src/librustc/middle/borrowck/gather_loans.rs @@ -95,9 +95,9 @@ pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> ReqMaps { return glcx.req_maps; } -fn req_loans_in_fn(fk: visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, +fn req_loans_in_fn(fk: &visit::fn_kind, + decl: &ast::fn_decl, + body: &ast::blk, sp: span, id: ast::node_id, &&self: @mut GatherLoanCtxt, @@ -107,7 +107,7 @@ fn req_loans_in_fn(fk: visit::fn_kind, let old_root_ub = self.root_ub; 
self.root_ub = body.node.id; - match fk { + match *fk { visit::fk_anon(*) | visit::fk_fn_block(*) => {} visit::fk_item_fn(*) | visit::fk_method(*) | visit::fk_dtor(*) => { @@ -269,13 +269,13 @@ fn req_loans_in_expr(ex: @ast::expr, (vt.visit_expr)(cond, self, vt); // during body, can only root for the body - self.root_ub = (*body).node.id; - (vt.visit_block)((*body), self, vt); + self.root_ub = body.node.id; + (vt.visit_block)(body, self, vt); } // see explanation attached to the `root_ub` field: ast::expr_loop(ref body, _) => { - self.root_ub = (*body).node.id; + self.root_ub = body.node.id; visit::visit_expr(ex, self, vt); } diff --git a/src/librustc/middle/check_loop.rs b/src/librustc/middle/check_loop.rs index 34665fe7b7d27..15f64b0fa2f0c 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -33,10 +33,10 @@ pub fn check_crate(tcx: ty::ctxt, crate: @crate) { match e.node { expr_while(e, ref b) => { (v.visit_expr)(e, cx, v); - (v.visit_block)((*b), Context { in_loop: true,.. cx }, v); + (v.visit_block)(b, Context { in_loop: true,.. cx }, v); } expr_loop(ref b, _) => { - (v.visit_block)((*b), Context { in_loop: true,.. cx }, v); + (v.visit_block)(b, Context { in_loop: true,.. cx }, v); } expr_fn(*) => { visit::visit_expr(e, Context { @@ -45,7 +45,7 @@ pub fn check_crate(tcx: ty::ctxt, crate: @crate) { }, v); } expr_fn_block(_, ref b) => { - (v.visit_block)((*b), Context { + (v.visit_block)(b, Context { in_loop: false, can_ret: false }, v); @@ -53,10 +53,10 @@ pub fn check_crate(tcx: ty::ctxt, crate: @crate) { expr_loop_body(@expr {node: expr_fn_block(_, ref b), _}) => { let sigil = ty::ty_closure_sigil(ty::expr_ty(tcx, e)); let blk = (sigil == BorrowedSigil); - (v.visit_block)((*b), Context { + (v.visit_block)(b, Context { in_loop: true, can_ret: blk - }, v); + }, v); } expr_break(_) => { if !cx.in_loop { diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index fbe28d7ac9b8d..f736c403cfccd 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -673,9 +673,9 @@ pub fn check_local(cx: @MatchCheckCtxt, } pub fn check_fn(cx: @MatchCheckCtxt, - kind: visit::fn_kind, - decl: fn_decl, - body: blk, + kind: &visit::fn_kind, + decl: &fn_decl, + body: &blk, sp: span, id: node_id, &&s: (), diff --git a/src/librustc/middle/freevars.rs b/src/librustc/middle/freevars.rs index a3ef3833d9755..7d2ab1700ddb1 100644 --- a/src/librustc/middle/freevars.rs +++ b/src/librustc/middle/freevars.rs @@ -39,7 +39,7 @@ pub type freevar_map = HashMap; // Since we want to be able to collect upvars in some arbitrary piece // of the AST, we take a walker function that we invoke with a visitor // in order to start the search. 
-fn collect_freevars(def_map: resolve::DefMap, blk: ast::blk) +fn collect_freevars(def_map: resolve::DefMap, blk: &ast::blk) -> freevar_info { let seen = HashMap(); let refs = @mut ~[]; @@ -100,8 +100,8 @@ pub fn annotate_freevars(def_map: resolve::DefMap, crate: @ast::crate) -> freevar_map { let freevars = HashMap(); - let walk_fn = fn@(_fk: visit::fn_kind, _decl: ast::fn_decl, - blk: ast::blk, _sp: span, nid: ast::node_id) { + let walk_fn = fn@(_fk: &visit::fn_kind, _decl: &ast::fn_decl, + blk: &ast::blk, _sp: span, nid: ast::node_id) { let vars = collect_freevars(def_map, blk); freevars.insert(nid, vars); }; diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs index b08adef88c98c..1b1e9afa924d5 100644 --- a/src/librustc/middle/kind.rs +++ b/src/librustc/middle/kind.rs @@ -153,8 +153,14 @@ fn with_appropriate_checker(cx: Context, id: node_id, b: fn(check_fn)) { // Check that the free variables used in a shared/sendable closure conform // to the copy/move kind bounds. Then recursively check the function body. -fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span, - fn_id: node_id, cx: Context, v: visit::vt) { +fn check_fn( + fk: &visit::fn_kind, + decl: &fn_decl, + body: &blk, + sp: span, + fn_id: node_id, + cx: Context, + v: visit::vt) { // Check kinds on free variables: do with_appropriate_checker(cx, fn_id) |chk| { @@ -166,7 +172,7 @@ fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span, visit::visit_fn(fk, decl, body, sp, fn_id, cx, v); } -fn check_arm(a: arm, cx: Context, v: visit::vt) { +fn check_arm(a: &arm, cx: Context, v: visit::vt) { for vec::each(a.pats) |p| { do pat_util::pat_bindings(cx.tcx.def_map, *p) |mode, id, span, _pth| { if mode == bind_by_copy { diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 16f1d36e05c7c..d399f0e6886e6 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -324,7 +324,7 @@ struct LanguageItemCollector { pub impl LanguageItemCollector { fn match_and_collect_meta_item(&self, item_def_id: def_id, - meta_item: meta_item) { + meta_item: @meta_item) { match meta_item.node { meta_name_value(key, literal) => { match literal.node { @@ -376,10 +376,10 @@ pub impl LanguageItemCollector { visit_item: |item| { for item.attrs.each |attribute| { unsafe { - (*this).match_and_collect_meta_item(local_def(item - .id), - attribute.node - .value); + (*this).match_and_collect_meta_item( + local_def(item.id), + attribute.node.value + ); } } }, diff --git a/src/librustc/middle/lint.rs b/src/librustc/middle/lint.rs index b14245afa9a19..8bcc573ac36cb 100644 --- a/src/librustc/middle/lint.rs +++ b/src/librustc/middle/lint.rs @@ -753,7 +753,7 @@ fn check_item_structural_records(cx: ty::ctxt, it: @ast::item) { fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) { fn check_foreign_fn(cx: ty::ctxt, fn_id: ast::node_id, - decl: ast::fn_decl) { + decl: &ast::fn_decl) { let tys = vec::map(decl.inputs, |a| a.ty ); for vec::each(vec::append_one(tys, decl.output)) |ty| { match ty.node { @@ -786,9 +786,9 @@ fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) { if attr::foreign_abi(it.attrs) != either::Right(ast::foreign_abi_rust_intrinsic) => { for nmod.items.each |ni| { - match ni.node { + match /*bad*/copy ni.node { ast::foreign_item_fn(ref decl, _, _) => { - check_foreign_fn(cx, it.id, *decl); + check_foreign_fn(cx, it.id, decl); } // FIXME #4622: Not implemented. 
ast::foreign_item_const(*) => {} @@ -950,13 +950,13 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { } } -fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl, - _body: ast::blk, span: span, id: ast::node_id) { +fn check_fn(tcx: ty::ctxt, fk: &visit::fn_kind, decl: &ast::fn_decl, + _body: &ast::blk, span: span, id: ast::node_id) { debug!("lint check_fn fk=%? id=%?", fk, id); // don't complain about blocks, since they tend to get their modes // specified from the outside - match fk { + match *fk { visit::fk_fn_block(*) => { return; } _ => {} } @@ -965,7 +965,7 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl, check_fn_deprecated_modes(tcx, fn_ty, decl, span, id); } -fn check_fn_deprecated_modes(tcx: ty::ctxt, fn_ty: ty::t, decl: ast::fn_decl, +fn check_fn_deprecated_modes(tcx: ty::ctxt, fn_ty: ty::t, decl: &ast::fn_decl, span: span, id: ast::node_id) { match ty::get(fn_ty).sty { ty::ty_closure(ty::ClosureTy {sig: ref sig, _}) | @@ -1017,7 +1017,7 @@ fn check_fn_deprecated_modes(tcx: ty::ctxt, fn_ty: ty::t, decl: ast::fn_decl, ast::ty_closure(@ast::TyClosure{decl: ref d, _}) | ast::ty_bare_fn(@ast::TyBareFn{decl: ref d, _})=>{ check_fn_deprecated_modes(tcx, arg_ty.ty, - *d, span, id); + d, span, id); } ast::ty_path(*) => { // This is probably a typedef, so we can't @@ -1053,7 +1053,7 @@ fn check_item_deprecated_modes(tcx: ty::ctxt, it: @ast::item) { ast::ty_bare_fn(@ast::TyBareFn {decl: ref decl, _}) => { let fn_ty = ty::node_id_to_type(tcx, it.id); check_fn_deprecated_modes( - tcx, fn_ty, *decl, ty.span, it.id) + tcx, fn_ty, decl, ty.span, it.id) } _ => () } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 3753c93f13ade..5286fa1025a76 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -435,9 +435,9 @@ pub impl IrMaps { } } -fn visit_fn(fk: visit::fn_kind, - decl: fn_decl, - body: blk, +fn visit_fn(fk: &visit::fn_kind, + decl: &fn_decl, + body: &blk, sp: span, id: node_id, &&self: @mut IrMaps, @@ -465,7 +465,7 @@ fn visit_fn(fk: visit::fn_kind, }; // Add `self`, whether explicit or implicit. - match fk { + match *fk { fk_method(_, _, method) => { match method.self_ty.node { sty_by_ref => { @@ -540,7 +540,7 @@ fn visit_local(local: @local, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) { visit::visit_local(local, self, vt); } -fn visit_arm(arm: arm, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) { +fn visit_arm(arm: &arm, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) { let def_map = self.tcx.def_map; for arm.pats.each |pat| { do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| { @@ -976,7 +976,7 @@ pub impl Liveness { // _______________________________________________________________________ - fn compute(&self, decl: fn_decl, body: blk) -> LiveNode { + fn compute(&self, decl: &fn_decl, body: &blk) -> LiveNode { // if there is a `break` or `again` at the top level, then it's // effectively a return---this only occurs in `for` loops, // where the body is really a closure. 
@@ -1001,7 +1001,7 @@ pub impl Liveness { entry_ln } - fn propagate_through_fn_block(&self, decl: fn_decl, blk: blk) + fn propagate_through_fn_block(&self, decl: &fn_decl, blk: &blk) -> LiveNode { // inputs passed by & mode should be considered live on exit: for decl.inputs.each |arg| { @@ -1035,7 +1035,7 @@ pub impl Liveness { self.propagate_through_block(blk, self.s.fallthrough_ln) } - fn propagate_through_block(&self, blk: blk, succ: LiveNode) -> LiveNode { + fn propagate_through_block(&self, blk: &blk, succ: LiveNode) -> LiveNode { let succ = self.propagate_through_opt_expr(blk.node.expr, succ); do blk.node.stmts.foldr(succ) |stmt, succ| { self.propagate_through_stmt(*stmt, succ) @@ -1131,7 +1131,7 @@ pub impl Liveness { The next-node for a break is the successor of the entire loop. The next-node for a continue is the top of this loop. */ - self.with_loop_nodes((*blk).node.id, succ, + self.with_loop_nodes(blk.node.id, succ, self.live_node(expr.id, expr.span), || { // the construction of a closure itself is not important, @@ -1161,7 +1161,7 @@ pub impl Liveness { // ( succ ) // let else_ln = self.propagate_through_opt_expr(els, succ); - let then_ln = self.propagate_through_block((*then), succ); + let then_ln = self.propagate_through_block(then, succ); let ln = self.live_node(expr.id, expr.span); self.init_from_succ(ln, else_ln); self.merge_from_succ(ln, then_ln, false); @@ -1169,13 +1169,13 @@ pub impl Liveness { } expr_while(cond, ref blk) => { - self.propagate_through_loop(expr, Some(cond), (*blk), succ) + self.propagate_through_loop(expr, Some(cond), blk, succ) } // Note that labels have been resolved, so we don't need to look // at the label ident expr_loop(ref blk, _) => { - self.propagate_through_loop(expr, None, (*blk), succ) + self.propagate_through_loop(expr, None, blk, succ) } expr_match(e, ref arms) => { @@ -1196,9 +1196,9 @@ pub impl Liveness { let ln = self.live_node(expr.id, expr.span); self.init_empty(ln, succ); let mut first_merge = true; - for (*arms).each |arm| { + for arms.each |arm| { let body_succ = - self.propagate_through_block(arm.body, succ); + self.propagate_through_block(&arm.body, succ); let guard_succ = self.propagate_through_opt_expr(arm.guard, body_succ); let arm_succ = @@ -1359,7 +1359,7 @@ pub impl Liveness { } expr_block(ref blk) => { - self.propagate_through_block((*blk), succ) + self.propagate_through_block(blk, succ) } expr_mac(*) => { @@ -1460,7 +1460,7 @@ pub impl Liveness { fn propagate_through_loop(&self, expr: @expr, cond: Option<@expr>, - body: blk, + body: &blk, succ: LiveNode) -> LiveNode { /* @@ -1565,7 +1565,7 @@ fn check_local(local: @local, &&self: @Liveness, vt: vt<@Liveness>) { visit::visit_local(local, self, vt); } -fn check_arm(arm: arm, &&self: @Liveness, vt: vt<@Liveness>) { +fn check_arm(arm: &arm, &&self: @Liveness, vt: vt<@Liveness>) { do self.arm_pats_bindings(arm.pats) |ln, var, sp| { self.warn_about_unused(sp, ln, var); } @@ -1636,8 +1636,8 @@ fn check_expr(expr: @expr, &&self: @Liveness, vt: vt<@Liveness>) { } } -fn check_fn(_fk: visit::fn_kind, _decl: fn_decl, - _body: blk, _sp: span, _id: node_id, +fn check_fn(_fk: &visit::fn_kind, _decl: &fn_decl, + _body: &blk, _sp: span, _id: node_id, &&_self: @Liveness, _v: vt<@Liveness>) { // do not check contents of nested fns } @@ -1650,7 +1650,7 @@ enum ReadKind { } pub impl @Liveness { - fn check_ret(&self, id: node_id, sp: span, _fk: visit::fn_kind, + fn check_ret(&self, id: node_id, sp: span, _fk: &visit::fn_kind, entry_ln: LiveNode) { if self.live_on_entry(entry_ln, 
self.s.no_ret_var).is_some() { // if no_ret_var is live, then we fall off the end of the @@ -1882,7 +1882,7 @@ pub impl @Liveness { if name[0] == ('_' as u8) { None } else { Some(name) } } - fn warn_about_unused_args(&self, decl: fn_decl, entry_ln: LiveNode) { + fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) { for decl.inputs.each |arg| { do pat_util::pat_bindings(self.tcx.def_map, arg.pat) |_bm, p_id, sp, _n| { diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 19accd25c12c3..95764050898cd 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -221,7 +221,7 @@ pub fn record_parent(cx: ctxt, child_id: ast::node_id) { } } -pub fn resolve_block(blk: ast::blk, cx: ctxt, visitor: visit::vt) { +pub fn resolve_block(blk: &ast::blk, cx: ctxt, visitor: visit::vt) { // Record the parent of this block. record_parent(cx, blk.node.id); @@ -230,7 +230,7 @@ pub fn resolve_block(blk: ast::blk, cx: ctxt, visitor: visit::vt) { visit::visit_block(blk, new_cx, visitor); } -pub fn resolve_arm(arm: ast::arm, cx: ctxt, visitor: visit::vt) { +pub fn resolve_arm(arm: &ast::arm, cx: ctxt, visitor: visit::vt) { visit::visit_arm(arm, cx, visitor); } @@ -316,14 +316,14 @@ pub fn resolve_item(item: @ast::item, cx: ctxt, visitor: visit::vt) { visit::visit_item(item, new_cx, visitor); } -pub fn resolve_fn(fk: visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, +pub fn resolve_fn(fk: &visit::fn_kind, + decl: &ast::fn_decl, + body: &ast::blk, sp: span, id: ast::node_id, cx: ctxt, visitor: visit::vt) { - let fn_cx = match fk { + let fn_cx = match *fk { visit::fk_item_fn(*) | visit::fk_method(*) | visit::fk_dtor(*) => { // Top-level functions are a root scope. @@ -337,7 +337,7 @@ pub fn resolve_fn(fk: visit::fn_kind, }; // Record the ID of `self`. - match fk { + match *fk { visit::fk_method(_, _, method) => { cx.region_map.insert(method.self_id, body.node.id); } @@ -607,9 +607,9 @@ pub fn determine_rp_in_item(item: @ast::item, } } -pub fn determine_rp_in_fn(fk: visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, +pub fn determine_rp_in_fn(fk: &visit::fn_kind, + decl: &ast::fn_decl, + body: &ast::blk, _: span, _: ast::node_id, &&cx: @mut DetermineRpCtxt, @@ -627,7 +627,7 @@ pub fn determine_rp_in_fn(fk: visit::fn_kind, } } -pub fn determine_rp_in_ty_method(ty_m: ast::ty_method, +pub fn determine_rp_in_ty_method(ty_m: &ast::ty_method, &&cx: @mut DetermineRpCtxt, visitor: visit::vt<@mut DetermineRpCtxt>) { do cx.with(cx.item_id, false) { diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index 924886881efb6..1c4928fd37454 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -1029,7 +1029,7 @@ pub impl Resolver { } } - fn block_needs_anonymous_module(@mut self, block: blk) -> bool { + fn block_needs_anonymous_module(@mut self, block: &blk) -> bool { // If the block has view items, we need an anonymous module. if block.node.view_items.len() > 0 { return true; @@ -1081,7 +1081,7 @@ pub impl Resolver { let privacy = visibility_to_privacy(item.vis); match /*bad*/copy item.node { - item_mod(module_) => { + item_mod(ref module_) => { let (name_bindings, new_parent) = self.add_child(ident, parent, ForbidDuplicateModules, sp); @@ -1278,7 +1278,7 @@ pub impl Resolver { // methods, so check that first. 
let mut has_static_methods = false; for (*methods).each |method| { - let ty_m = trait_method_to_ty_method(*method); + let ty_m = trait_method_to_ty_method(method); match ty_m.self_ty.node { sty_static => { has_static_methods = true; @@ -1306,7 +1306,7 @@ pub impl Resolver { // Add the names of all the methods to the trait info. let method_names = @HashMap(); for (*methods).each |method| { - let ty_m = trait_method_to_ty_method(*method); + let ty_m = trait_method_to_ty_method(method); let ident = ty_m.ident; // Add it to the trait info if not static, @@ -1537,7 +1537,7 @@ pub impl Resolver { } fn build_reduced_graph_for_block(@mut self, - block: blk, + block: &blk, parent: ReducedGraphParent, &&visitor: vt) { let mut new_parent; @@ -3729,7 +3729,7 @@ pub impl Resolver { visitor); } - item_mod(module_) => { + item_mod(ref module_) => { do self.with_scope(Some(item.ident)) { self.resolve_module(module_, item.span, item.ident, item.id, visitor); @@ -3788,7 +3788,7 @@ pub impl Resolver { item.id, 0, OpaqueFunctionRibKind), - (*block), + block, NoSelfBinding, visitor); } @@ -3866,7 +3866,7 @@ pub impl Resolver { rib_kind: RibKind, optional_declaration: Option<@fn_decl>, type_parameters: TypeParameters, - block: blk, + block: &blk, self_binding: SelfBinding, visitor: ResolveVisitor) { // Create a value rib for the function. @@ -3980,7 +3980,7 @@ pub impl Resolver { self.resolve_function(NormalRibKind, None, NoTypeParameters, - (*destructor).node.body, + &destructor.node.body, HasSelfBinding ((*destructor).node.self_id, true), @@ -4013,7 +4013,7 @@ pub impl Resolver { self.resolve_function(rib_kind, Some(@/*bad*/copy method.decl), type_parameters, - method.body, + &method.body, self_binding, visitor); } @@ -4095,7 +4095,7 @@ pub impl Resolver { } fn resolve_module(@mut self, - module_: _mod, + module_: &_mod, span: span, _name: ident, id: node_id, @@ -4137,7 +4137,7 @@ pub impl Resolver { return result; } - fn check_consistent_bindings(@mut self, arm: arm) { + fn check_consistent_bindings(@mut self, arm: &arm) { if arm.pats.len() == 0 { return; } let map_0 = self.binding_mode_map(arm.pats[0]); for arm.pats.eachi() |i, p| { @@ -4176,7 +4176,7 @@ pub impl Resolver { } } - fn resolve_arm(@mut self, arm: arm, visitor: ResolveVisitor) { + fn resolve_arm(@mut self, arm: &arm, visitor: ResolveVisitor) { (*self.value_ribs).push(@Rib(NormalRibKind)); let bindings_list = HashMap(); @@ -4190,12 +4190,12 @@ pub impl Resolver { self.check_consistent_bindings(arm); visit_expr_opt(arm.guard, (), visitor); - self.resolve_block(arm.body, visitor); + self.resolve_block(&arm.body, visitor); (*self.value_ribs).pop(); } - fn resolve_block(@mut self, block: blk, visitor: ResolveVisitor) { + fn resolve_block(@mut self, block: &blk, visitor: ResolveVisitor) { debug!("(resolving block) entering block"); (*self.value_ribs).push(@Rib(NormalRibKind)); @@ -4954,7 +4954,7 @@ pub impl Resolver { self.resolve_function(FunctionRibKind(expr.id, block.node.id), Some(@/*bad*/copy *fn_decl), NoTypeParameters, - (*block), + block, NoSelfBinding, visitor); } diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 47c0021fd7901..d1472f63ae746 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -2100,7 +2100,7 @@ pub fn trans_item(ccx: @CrateContext, item: ast::item) { vec::append(/*bad*/copy *path, ~[path_name(item.ident)]), decl, body, llfndecl, no_self, None, item.id, None); } else { - for vec::each((*body).node.stmts) |stmt| { + for body.node.stmts.each |stmt| { 
match stmt.node { ast::stmt_decl(@codemap::spanned { node: ast::decl_item(i), _ }, _) => { @@ -2115,7 +2115,7 @@ pub fn trans_item(ccx: @CrateContext, item: ast::item) { meth::trans_impl(ccx, /*bad*/copy *path, item.ident, *ms, generics, None, item.id); } - ast::item_mod(m) => { + ast::item_mod(ref m) => { trans_mod(ccx, m); } ast::item_enum(ref enum_definition, ref tps) => { @@ -2128,11 +2128,10 @@ pub fn trans_item(ccx: @CrateContext, item: ast::item) { } } ast::item_const(_, expr) => consts::trans_const(ccx, expr, item.id), - ast::item_foreign_mod(foreign_mod) => { + ast::item_foreign_mod(ref foreign_mod) => { let abi = match attr::foreign_abi(item.attrs) { - either::Right(abi_) => abi_, - either::Left(ref msg) => ccx.sess.span_fatal(item.span, - /*bad*/copy *msg) + Right(abi_) => abi_, + Left(ref msg) => ccx.sess.span_fatal(item.span, /*bad*/copy *msg) }; foreign::trans_foreign_mod(ccx, foreign_mod, abi); } @@ -2172,9 +2171,9 @@ pub fn trans_struct_def(ccx: @CrateContext, struct_def: @ast::struct_def, // separate modules in the compiled program. That's because modules exist // only as a convenience for humans working with the code, to organize names // and control visibility. -pub fn trans_mod(ccx: @CrateContext, m: ast::_mod) { +pub fn trans_mod(ccx: @CrateContext, m: &ast::_mod) { let _icx = ccx.insn_ctxt("trans_mod"); - for vec::each(m.items) |item| { + for m.items.each |item| { trans_item(ccx, **item); } } @@ -3027,8 +3026,12 @@ pub fn trans_crate(sess: session::Session, let symbol_hasher = @hash::default_state(); let link_meta = link::build_link_meta(sess, crate, output, symbol_hasher); - let reachable = reachable::find_reachable(crate.node.module, emap2, tcx, - maps.method_map); + let reachable = reachable::find_reachable( + &crate.node.module, + emap2, + tcx, + maps.method_map + ); // Append ".rc" to crate name as LLVM module identifier. // @@ -3145,7 +3148,7 @@ pub fn trans_crate(sess: session::Session, { let _icx = ccx.insn_ctxt("text"); - trans_mod(ccx, crate.node.module); + trans_mod(ccx, &crate.node.module); } decl_gc_metadata(ccx, llmod_id); diff --git a/src/librustc/middle/trans/callee.rs b/src/librustc/middle/trans/callee.rs index 038a2c7ae4b40..12864f12abd99 100644 --- a/src/librustc/middle/trans/callee.rs +++ b/src/librustc/middle/trans/callee.rs @@ -417,7 +417,7 @@ pub fn trans_lang_call_with_type_params(bcx: block, ArgVals(args), dest, DontAutorefArg); } -pub fn body_contains_ret(body: ast::blk) -> bool { +pub fn body_contains_ret(body: &ast::blk) -> bool { let cx = @mut false; visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { visit_item: |_i, _cx, _v| { }, @@ -451,7 +451,7 @@ pub fn trans_call_inner( ast::expr_loop_body(@ast::expr { node: ast::expr_fn_block(_, ref body), _ - }) => body_contains_ret((*body)), + }) => body_contains_ret(body), _ => false } } diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs index aa44e8b1fe9eb..6c0c73dd016f2 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -207,7 +207,7 @@ fn build_wrap_fn_(ccx: @CrateContext, // function itself is unnecessary). We used to do this, in fact, and will // perhaps do so in the future. 
pub fn trans_foreign_mod(ccx: @CrateContext, - foreign_mod: ast::foreign_mod, + foreign_mod: &ast::foreign_mod, abi: ast::foreign_abi) { let _icx = ccx.insn_ctxt("foreign::trans_foreign_mod"); diff --git a/src/librustc/middle/trans/meth.rs b/src/librustc/middle/trans/meth.rs index be1a3b90b4c37..d2bf034165dd9 100644 --- a/src/librustc/middle/trans/meth.rs +++ b/src/librustc/middle/trans/meth.rs @@ -307,7 +307,7 @@ pub fn trans_static_method_callee(bcx: block, let mname = if method_id.crate == ast::local_crate { match bcx.tcx().items.get(&method_id.node) { ast_map::node_trait_method(trait_method, _, _) => { - ast_util::trait_method_to_ty_method(*trait_method).ident + ast_util::trait_method_to_ty_method(trait_method).ident } _ => fail!(~"callee is not a trait method") } diff --git a/src/librustc/middle/trans/reachable.rs b/src/librustc/middle/trans/reachable.rs index a8fb909a5a0a1..17abebad600dc 100644 --- a/src/librustc/middle/trans/reachable.rs +++ b/src/librustc/middle/trans/reachable.rs @@ -40,7 +40,7 @@ struct ctx { rmap: map } -pub fn find_reachable(crate_mod: _mod, exp_map2: resolve::ExportMap2, +pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2, tcx: ty::ctxt, method_map: typeck::method_map) -> map { let rmap = HashMap(); let cx = ctx { @@ -87,10 +87,10 @@ fn traverse_def_id(cx: ctx, did: def_id) { } } -fn traverse_public_mod(cx: ctx, mod_id: node_id, m: _mod) { +fn traverse_public_mod(cx: ctx, mod_id: node_id, m: &_mod) { if !traverse_exports(cx, mod_id) { // No exports, so every local item is exported - for vec::each(m.items) |item| { + for m.items.each |item| { traverse_public_item(cx, *item); } } @@ -99,11 +99,11 @@ fn traverse_public_mod(cx: ctx, mod_id: node_id, m: _mod) { fn traverse_public_item(cx: ctx, item: @item) { if cx.rmap.contains_key(&item.id) { return; } cx.rmap.insert(item.id, ()); - match item.node { - item_mod(ref m) => traverse_public_mod(cx, item.id, *m), + match /*bad*/copy item.node { + item_mod(ref m) => traverse_public_mod(cx, item.id, m), item_foreign_mod(ref nm) => { if !traverse_exports(cx, item.id) { - for vec::each(nm.items) |item| { + for nm.items.each |item| { cx.rmap.insert(item.id, ()); } } @@ -111,7 +111,7 @@ fn traverse_public_item(cx: ctx, item: @item) { item_fn(_, _, ref generics, ref blk) => { if generics.ty_params.len() > 0u || attr::find_inline_attr(item.attrs) != attr::ia_none { - traverse_inline_body(cx, (*blk)); + traverse_inline_body(cx, blk); } } item_impl(ref generics, _, _, ref ms) => { @@ -121,7 +121,7 @@ fn traverse_public_item(cx: ctx, item: @item) { attr::find_inline_attr(m.attrs) != attr::ia_none { cx.rmap.insert(m.id, ()); - traverse_inline_body(cx, m.body); + traverse_inline_body(cx, &m.body); } } } @@ -129,12 +129,12 @@ fn traverse_public_item(cx: ctx, item: @item) { for struct_def.ctor_id.each |&ctor_id| { cx.rmap.insert(ctor_id, ()); } - do option::iter(&struct_def.dtor) |dtor| { + do struct_def.dtor.iter |dtor| { cx.rmap.insert(dtor.node.id, ()); if generics.ty_params.len() > 0u || attr::find_inline_attr(dtor.node.attrs) != attr::ia_none { - traverse_inline_body(cx, dtor.node.body); + traverse_inline_body(cx, &dtor.node.body); } } } @@ -173,7 +173,7 @@ fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt) { } } -fn traverse_inline_body(cx: ctx, body: blk) { +fn traverse_inline_body(cx: ctx, body: &blk) { fn traverse_expr(e: @expr, cx: ctx, v: visit::vt) { match e.node { expr_path(_) => { @@ -222,16 +222,19 @@ fn traverse_inline_body(cx: ctx, body: blk) { fn traverse_item(i: @item, cx: ctx, _v: visit::vt) { 
traverse_public_item(cx, i); } - visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { + visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { visit_expr: traverse_expr, visit_item: traverse_item, ..*visit::default_visitor() })); } -fn traverse_all_resources_and_impls(cx: ctx, crate_mod: _mod) { +fn traverse_all_resources_and_impls(cx: ctx, crate_mod: &_mod) { visit::visit_mod( - crate_mod, codemap::dummy_sp(), 0, cx, + crate_mod, + codemap::dummy_sp(), + 0, + cx, visit::mk_vt(@visit::Visitor { visit_expr: |_e, _cx, _v| { }, visit_item: |i, cx, v| { diff --git a/src/librustc/middle/trans/type_use.rs b/src/librustc/middle/trans/type_use.rs index 734b4ea53daa9..ff92f265cc06a 100644 --- a/src/librustc/middle/trans/type_use.rs +++ b/src/librustc/middle/trans/type_use.rs @@ -107,7 +107,7 @@ pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint) ast_map::node_item(@ast::item { node: item_fn(_, _, _, ref body), _ }, _) | ast_map::node_method(@ast::method {body: ref body, _}, _, _) => { - handle_body(cx, (*body)); + handle_body(cx, body); } ast_map::node_trait_method(*) => { // This will be a static trait method. For now, we just assume @@ -163,8 +163,8 @@ pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint) for uint::range(0u, n_tps) |n| { cx.uses[n] |= flags;} } } - ast_map::node_dtor(_, dtor, _, _) => { - handle_body(cx, dtor.node.body); + ast_map::node_dtor(_, ref dtor, _, _) => { + handle_body(cx, &dtor.node.body); } ast_map::node_struct_ctor(*) => { // Similarly to node_variant, this monomorphized function just uses @@ -363,7 +363,7 @@ pub fn mark_for_expr(cx: Context, e: @expr) { } } -pub fn handle_body(cx: Context, body: blk) { +pub fn handle_body(cx: Context, body: &blk) { let v = visit::mk_vt(@visit::Visitor { visit_expr: |e, cx, v| { visit::visit_expr(e, cx, v); diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index b9bd0e6b58113..cabda54eeea00 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -3811,7 +3811,7 @@ pub fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path { ast_map::path_name(method.ident)) } ast_map::node_trait_method(trait_method, _, path) => { - let method = ast_util::trait_method_to_ty_method(*trait_method); + let method = ast_util::trait_method_to_ty_method(&*trait_method); vec::append_one(/*bad*/copy *path, ast_map::path_name(method.ident)) } diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index d0affaa170218..5c05723bb3f04 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -333,14 +333,14 @@ pub fn ast_ty_to_ty( }; ty::mk_rec(tcx, flds) } - ast::ty_bare_fn(bf) => { + ast::ty_bare_fn(ref bf) => { ty::mk_bare_fn(tcx, ty_of_bare_fn(self, rscope, bf.purity, - bf.abi, bf.decl)) + bf.abi, &bf.decl)) } - ast::ty_closure(f) => { + ast::ty_closure(ref f) => { let fn_decl = ty_of_closure(self, rscope, f.sigil, f.purity, f.onceness, - f.region, f.decl, None, + f.region, &f.decl, None, ast_ty.span); ty::mk_closure(tcx, fn_decl) } @@ -474,7 +474,7 @@ pub fn ty_of_bare_fn( rscope: RS, purity: ast::purity, abi: ast::Abi, - decl: ast::fn_decl) + decl: &ast::fn_decl) -> ty::BareFnTy { debug!("ty_of_fn_decl"); @@ -502,7 +502,7 @@ pub fn ty_of_closure( purity: ast::purity, onceness: ast::Onceness, opt_region: Option<@ast::region>, - decl: ast::fn_decl, + decl: &ast::fn_decl, expected_tys: Option, span: span) -> ty::ClosureTy { diff --git a/src/librustc/middle/typeck/check/_match.rs 
b/src/librustc/middle/typeck/check/_match.rs index 795ea4323fe82..39b2a2efdd817 100644 --- a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -58,7 +58,7 @@ pub fn check_match(fcx: @mut FnCtxt, Some(e) => { check_expr_has_type(fcx, e, ty::mk_bool(tcx)); }, None => () } - if !check_block(fcx, arm.body) { arm_non_bot = true; } + if !check_block(fcx, &arm.body) { arm_non_bot = true; } let bty = fcx.node_ty(arm.body.node.id); demand::suptype(fcx, arm.body.span, result_ty, bty); } diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index a3bfb31db1d11..9ca8268171b0b 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -246,7 +246,7 @@ pub fn check_item_types(ccx: @mut CrateCtxt, crate: @ast::crate) { pub fn check_bare_fn(ccx: @mut CrateCtxt, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, id: ast::node_id, self_info: Option) { let fty = ty::node_id_to_type(ccx.tcx, id); @@ -266,7 +266,7 @@ pub fn check_fn(ccx: @mut CrateCtxt, sigil: Option, fn_sig: &ty::FnSig, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, fn_kind: FnKind, old_fcx: Option<@mut FnCtxt>) { let tcx = ccx.tcx; @@ -384,7 +384,7 @@ pub fn check_fn(ccx: @mut CrateCtxt, fn gather_locals(fcx: @mut FnCtxt, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, arg_tys: &[ty::t], self_info: Option) { let tcx = fcx.ccx.tcx; @@ -462,7 +462,7 @@ pub fn check_fn(ccx: @mut CrateCtxt, visit::visit_pat(p, e, v); }; - let visit_block = fn@(b: ast::blk, &&e: (), v: visit::vt<()>) { + let visit_block = fn@(b: &ast::blk, &&e: (), v: visit::vt<()>) { // non-obvious: the `blk` variable maps to region lb, so // we have to keep this up-to-date. This // is... unfortunate. It'd be nice to not need this. 
@@ -472,8 +472,8 @@ pub fn check_fn(ccx: @mut CrateCtxt, }; // Don't descend into fns and items - fn visit_fn(_fk: visit::fn_kind, _decl: ast::fn_decl, - _body: ast::blk, _sp: span, + fn visit_fn(_fk: &visit::fn_kind, _decl: &ast::fn_decl, + _body: &ast::blk, _sp: span, _id: ast::node_id, &&_t: (), _v: visit::vt<()>) { } fn visit_item(_i: @ast::item, &&_e: (), _v: visit::vt<()>) { } @@ -500,7 +500,13 @@ pub fn check_method(ccx: @mut CrateCtxt, def_id: self_impl_def_id, explicit_self: method.self_ty }; - check_bare_fn(ccx, &method.decl, method.body, method.id, Some(self_info)); + check_bare_fn( + ccx, + &method.decl, + &method.body, + method.id, + Some(self_info) + ); } pub fn check_no_duplicate_fields(tcx: ty::ctxt, @@ -544,9 +550,13 @@ pub fn check_struct(ccx: @mut CrateCtxt, }; // typecheck the dtor let dtor_dec = ast_util::dtor_dec(); - check_bare_fn(ccx, &dtor_dec, - dtor.node.body, dtor.node.id, - Some(class_t)); + check_bare_fn( + ccx, + &dtor_dec, + &dtor.node.body, + dtor.node.id, + Some(class_t) + ); }; // Check that the class is instantiable @@ -568,7 +578,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) { it.id); } ast::item_fn(ref decl, _, _, ref body) => { - check_bare_fn(ccx, decl, (*body), it.id, None); + check_bare_fn(ccx, decl, body, it.id, None); } ast::item_impl(_, _, ty, ms) => { let rp = ccx.tcx.region_paramd_items.find(&it.id); @@ -1404,7 +1414,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, fn check_for(fcx: @mut FnCtxt, local: @ast::local, element_ty: ty::t, - body: ast::blk, + body: &ast::blk, node_id: ast::node_id) -> bool { let local_ty = fcx.local_ty(local.span, local.node.id); @@ -1418,7 +1428,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, // A generic function for checking the then and else in an if // or if-check fn check_then_else(fcx: @mut FnCtxt, - thn: ast::blk, + thn: &ast::blk, elsopt: Option<@ast::expr>, id: ast::node_id, _sp: span) @@ -1616,7 +1626,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, expr: @ast::expr, ast_sigil_opt: Option, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, fn_kind: FnKind, expected: Option) { let tcx = fcx.ccx.tcx; @@ -1660,7 +1670,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, let mut fn_ty = astconv::ty_of_closure( fcx, fcx, sigil, purity, expected_onceness, - None, *decl, expected_tys, expr.span); + None, decl, expected_tys, expr.span); let fty = ty::mk_closure(tcx, copy fn_ty); @@ -2101,7 +2111,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, // message because of the indirect_ret_ty. 
let fn_kind = if err_happened {Vanilla} else {ForLoop}; check_expr_fn(fcx, loop_body, None, - decl, *body, fn_kind, Some(inner_ty)); + decl, body, fn_kind, Some(inner_ty)); demand::suptype(fcx, loop_body.span, inner_ty, fcx.expr_ty(loop_body)); } @@ -2348,28 +2358,28 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, } ast::expr_if(cond, ref thn, elsopt) => { bot = check_expr_has_type(fcx, cond, ty::mk_bool(tcx)); - bot |= check_then_else(fcx, *thn, elsopt, id, expr.span); + bot |= check_then_else(fcx, thn, elsopt, id, expr.span); } ast::expr_while(cond, ref body) => { bot = check_expr_has_type(fcx, cond, ty::mk_bool(tcx)); - check_block_no_value(fcx, (*body)); + check_block_no_value(fcx, body); fcx.write_ty(id, ty::mk_nil(tcx)); } ast::expr_loop(ref body, _) => { - check_block_no_value(fcx, (*body)); + check_block_no_value(fcx, body); fcx.write_ty(id, ty::mk_nil(tcx)); - bot = !may_break(tcx, expr.id, (*body)); + bot = !may_break(tcx, expr.id, body); } ast::expr_match(discrim, ref arms) => { bot = _match::check_match(fcx, expr, discrim, (/*bad*/copy *arms)); } ast::expr_fn(sigil, ref decl, ref body, _) => { check_expr_fn(fcx, expr, Some(sigil), - decl, (*body), Vanilla, expected); + decl, body, Vanilla, expected); } ast::expr_fn_block(ref decl, ref body) => { check_expr_fn(fcx, expr, None, - decl, (*body), Vanilla, expected); + decl, body, Vanilla, expected); } ast::expr_loop_body(loop_body) => { check_loop_body(fcx, expr, expected, loop_body); @@ -2399,7 +2409,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, match b.node { ast::expr_fn_block(ref decl, ref body) => { check_expr_fn(fcx, b, None, - decl, *body, DoBlock, Some(inner_ty)); + decl, body, DoBlock, Some(inner_ty)); demand::suptype(fcx, b.span, inner_ty, fcx.expr_ty(b)); } // argh @@ -2409,7 +2419,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, } ast::expr_block(ref b) => { // If this is an unchecked block, turn off purity-checking - bot = check_block_with_expected(fcx, *b, expected); + bot = check_block_with_expected(fcx, b, expected); let typ = match b.node.expr { Some(expr) => fcx.expr_ty(expr), @@ -2699,7 +2709,7 @@ pub fn check_stmt(fcx: @mut FnCtxt, stmt: @ast::stmt) -> bool { return bot; } -pub fn check_block_no_value(fcx: @mut FnCtxt, blk: ast::blk) -> bool { +pub fn check_block_no_value(fcx: @mut FnCtxt, blk: &ast::blk) -> bool { let bot = check_block(fcx, blk); if !bot { let blkty = fcx.node_ty(blk.node.id); @@ -2709,12 +2719,12 @@ pub fn check_block_no_value(fcx: @mut FnCtxt, blk: ast::blk) -> bool { return bot; } -pub fn check_block(fcx0: @mut FnCtxt, blk: ast::blk) -> bool { +pub fn check_block(fcx0: @mut FnCtxt, blk: &ast::blk) -> bool { check_block_with_expected(fcx0, blk, None) } pub fn check_block_with_expected(fcx0: @mut FnCtxt, - blk: ast::blk, + blk: &ast::blk, expected: Option) -> bool { let fcx = match blk.node.rules { @@ -3109,7 +3119,7 @@ pub fn ast_expr_vstore_to_vstore(fcx: @mut FnCtxt, } // Returns true if b contains a break that can exit from b -pub fn may_break(cx: ty::ctxt, id: ast::node_id, b: ast::blk) -> bool { +pub fn may_break(cx: ty::ctxt, id: ast::node_id, b: &ast::blk) -> bool { // First: is there an unlabeled break immediately // inside the loop? 
(loop_query(b, |e| { diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index 7187f15e7495a..1f2dfe7192fb5 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -125,7 +125,7 @@ pub fn regionck_expr(fcx: @mut FnCtxt, e: @ast::expr) { fcx.infcx().resolve_regions(); } -pub fn regionck_fn(fcx: @mut FnCtxt, blk: ast::blk) { +pub fn regionck_fn(fcx: @mut FnCtxt, blk: &ast::blk) { let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 }; let v = regionck_visitor(); (v.visit_block)(blk, rcx, v); @@ -176,7 +176,7 @@ pub fn visit_local(l: @ast::local, &&rcx: @mut Rcx, v: rvt) { } } -pub fn visit_block(b: ast::blk, &&rcx: @mut Rcx, v: rvt) { +pub fn visit_block(b: &ast::blk, &&rcx: @mut Rcx, v: rvt) { visit::visit_block(b, rcx, v); } diff --git a/src/librustc/middle/typeck/check/vtable.rs b/src/librustc/middle/typeck/check/vtable.rs index 5ed247eb447a2..2dbf74e1666b4 100644 --- a/src/librustc/middle/typeck/check/vtable.rs +++ b/src/librustc/middle/typeck/check/vtable.rs @@ -695,7 +695,7 @@ pub fn resolve_expr(ex: @ast::expr, // Detect points where a trait-bounded type parameter is // instantiated, resolve the impls for the parameters. -pub fn resolve_in_block(fcx: @mut FnCtxt, bl: ast::blk) { +pub fn resolve_in_block(fcx: @mut FnCtxt, bl: &ast::blk) { visit::visit_block(bl, fcx, visit::mk_vt(@visit::Visitor { visit_expr: resolve_expr, visit_item: |_,_,_| {}, diff --git a/src/librustc/middle/typeck/check/writeback.rs b/src/librustc/middle/typeck/check/writeback.rs index a6aed9c7b0c91..e5aca315dd1c9 100644 --- a/src/librustc/middle/typeck/check/writeback.rs +++ b/src/librustc/middle/typeck/check/writeback.rs @@ -218,7 +218,7 @@ fn visit_expr(e: @ast::expr, &&wbcx: @mut WbCtxt, v: wb_vt) { } visit::visit_expr(e, wbcx, v); } -fn visit_block(b: ast::blk, &&wbcx: @mut WbCtxt, v: wb_vt) { +fn visit_block(b: &ast::blk, &&wbcx: @mut WbCtxt, v: wb_vt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, b.span, b.node.id); visit::visit_block(b, wbcx, v); @@ -278,7 +278,7 @@ pub fn resolve_type_vars_in_expr(fcx: @mut FnCtxt, e: @ast::expr) -> bool { pub fn resolve_type_vars_in_fn(fcx: @mut FnCtxt, decl: &ast::fn_decl, - blk: ast::blk, + blk: &ast::blk, self_info: Option) -> bool { let wbcx = @mut WbCtxt { fcx: fcx, success: true }; let visit = mk_visitor(); diff --git a/src/librustc/middle/typeck/coherence.rs b/src/librustc/middle/typeck/coherence.rs index 5ab2bcd851912..f8fcef7580b1d 100644 --- a/src/librustc/middle/typeck/coherence.rs +++ b/src/librustc/middle/typeck/coherence.rs @@ -628,7 +628,7 @@ pub impl CoherenceChecker { visit_crate(*crate, (), mk_vt(@Visitor { visit_item: |item, _context, visitor| { match /*bad*/copy item.node { - item_mod(module_) => { + item_mod(ref module_) => { // Then visit the module items. 
visit_mod(module_, item.span, item.id, (), visitor); } diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index ecc31cb6bd1a8..67dca7ea81196 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -230,7 +230,7 @@ pub fn ensure_trait_methods(ccx: @mut CrateCtxt, } fn make_static_method_ty(ccx: @mut CrateCtxt, - am: ast::ty_method, + am: &ast::ty_method, rp: Option, m: ty::method, // Take this as an argument b/c we may check @@ -291,10 +291,15 @@ pub fn ensure_trait_methods(ccx: @mut CrateCtxt, } let trait_bounds = ty_param_bounds(ccx, generics); - let ty_m = trait_method_to_ty_method(*m); - let method_ty = ty_of_ty_method(ccx, ty_m, region_paramd, def_id); + let ty_m = trait_method_to_ty_method(m); + let method_ty = ty_of_ty_method( + ccx, + &ty_m, + region_paramd, + def_id + ); if ty_m.self_ty.node == ast::sty_static { - make_static_method_ty(ccx, ty_m, region_paramd, + make_static_method_ty(ccx, &ty_m, region_paramd, method_ty, trait_ty, trait_bounds); } @@ -692,7 +697,7 @@ pub fn convert_struct(ccx: @mut CrateCtxt, astconv::ty_of_bare_fn( ccx, type_rscope(rp), ast::impure_fn, ast::RustAbi, - ast_util::dtor_dec())); + &ast_util::dtor_dec())); write_ty_to_tcx(tcx, dtor.node.id, t_dtor); tcx.tcache.insert(local_def(dtor.node.id), ty_param_bounds_and_ty { @@ -751,7 +756,7 @@ pub fn ty_of_method(ccx: @mut CrateCtxt, ident: m.ident, tps: ty_param_bounds(ccx, &m.generics), fty: astconv::ty_of_bare_fn(ccx, type_rscope(rp), m.purity, - ast::RustAbi, m.decl), + ast::RustAbi, &m.decl), self_ty: m.self_ty.node, vis: m.vis, def_id: local_def(m.id) @@ -759,14 +764,14 @@ pub fn ty_of_method(ccx: @mut CrateCtxt, } pub fn ty_of_ty_method(self: @mut CrateCtxt, - m: ast::ty_method, + m: &ast::ty_method, rp: Option, id: ast::def_id) -> ty::method { ty::method { ident: m.ident, tps: ty_param_bounds(self, &m.generics), fty: astconv::ty_of_bare_fn(self, type_rscope(rp), m.purity, - ast::RustAbi, m.decl), + ast::RustAbi, &m.decl), // assume public, because this is only invoked on trait methods self_ty: m.self_ty.node, vis: ast::public, @@ -822,7 +827,7 @@ pub fn ty_of_item(ccx: @mut CrateCtxt, it: @ast::item) ast::item_fn(ref decl, purity, ref generics, _) => { let bounds = ty_param_bounds(ccx, generics); let tofd = astconv::ty_of_bare_fn(ccx, empty_rscope, purity, - ast::RustAbi, *decl); + ast::RustAbi, decl); let tpt = ty_param_bounds_and_ty { bounds: bounds, region_param: None, @@ -905,19 +910,23 @@ pub fn ty_of_item(ccx: @mut CrateCtxt, it: @ast::item) pub fn ty_of_foreign_item(ccx: @mut CrateCtxt, it: @ast::foreign_item) -> ty::ty_param_bounds_and_ty { - match it.node { - ast::foreign_item_fn(ref fn_decl, _, ref generics) => { - return ty_of_foreign_fn_decl(ccx, *fn_decl, local_def(it.id), - generics); - } - ast::foreign_item_const(t) => { - let rb = in_binding_rscope(empty_rscope); - return ty::ty_param_bounds_and_ty { - bounds: @~[], - region_param: None, - ty: ast_ty_to_ty(ccx, rb, t) - }; - } + match /*bad*/copy it.node { + ast::foreign_item_fn(ref fn_decl, _, ref generics) => { + ty_of_foreign_fn_decl( + ccx, + fn_decl, + local_def(it.id), + generics + ) + } + ast::foreign_item_const(t) => { + let rb = in_binding_rscope(empty_rscope); + ty::ty_param_bounds_and_ty { + bounds: @~[], + region_param: None, + ty: ast_ty_to_ty(ccx, rb, t) + } + } } } @@ -977,7 +986,7 @@ pub fn ty_param_bounds(ccx: @mut CrateCtxt, } pub fn ty_of_foreign_fn_decl(ccx: @mut CrateCtxt, - decl: ast::fn_decl, + decl: &ast::fn_decl, def_id: 
ast::def_id, generics: &ast::Generics) -> ty::ty_param_bounds_and_ty { diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index a83447432a006..6357198fdf2cb 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -59,7 +59,7 @@ pub fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] { // Takes a predicate p, returns true iff p is true for any subexpressions // of b -- skipping any inner loops (loop, while, loop_body) -pub fn loop_query(b: ast::blk, p: fn@(ast::expr_) -> bool) -> bool { +pub fn loop_query(b: &ast::blk, p: fn@(ast::expr_) -> bool) -> bool { let rs = @mut false; let visit_expr: @fn(@ast::expr, &&flag: @mut bool, @@ -82,7 +82,7 @@ pub fn loop_query(b: ast::blk, p: fn@(ast::expr_) -> bool) -> bool { // Takes a predicate p, returns true iff p is true for any subexpressions // of b -- skipping any inner loops (loop, while, loop_body) -pub fn block_query(b: ast::blk, p: fn@(@ast::expr) -> bool) -> bool { +pub fn block_query(b: &ast::blk, p: fn@(@ast::expr) -> bool) -> bool { let rs = @mut false; let visit_expr: @fn(@ast::expr, &&flag: @mut bool, diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index a337431cefdb8..995fdba35cbd7 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -70,7 +70,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region) re_scope(node_id) => { match cx.items.find(&node_id) { Some(ast_map::node_block(ref blk)) => { - explain_span(cx, "block", (*blk).span) + explain_span(cx, "block", blk.span) } Some(ast_map::node_expr(expr)) => { match expr.node { @@ -108,7 +108,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region) match cx.items.find(&id) { Some(ast_map::node_block(ref blk)) => { - let (msg, opt_span) = explain_span(cx, "block", (*blk).span); + let (msg, opt_span) = explain_span(cx, "block", blk.span); (fmt!("%s %s", prefix, msg), opt_span) } Some(_) | None => { @@ -159,7 +159,7 @@ pub fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { match cx.items.find(&node_id) { Some(ast_map::node_block(ref blk)) => { fmt!("", - cx.sess.codemap.span_to_str((*blk).span)) + cx.sess.codemap.span_to_str(blk.span)) } Some(ast_map::node_expr(expr)) => { match expr.node { diff --git a/src/librustdoc/tystr_pass.rs b/src/librustdoc/tystr_pass.rs index bdfc2be7bd673..afd3ca0ef6af3 100644 --- a/src/librustdoc/tystr_pass.rs +++ b/src/librustdoc/tystr_pass.rs @@ -77,7 +77,7 @@ fn get_fn_sig(srv: astsrv::Srv, fn_id: doc::AstId) -> Option<~str> { ident: ident, node: ast::foreign_item_fn(ref decl, _, ref tys), _ }, _, _) => { - Some(pprust::fun_to_str(*decl, ident, tys, + Some(pprust::fun_to_str(decl, ident, tys, extract::interner())) } _ => fail!(~"get_fn_sig: fn_id not bound to a fn item") @@ -213,7 +213,7 @@ fn get_method_sig( match method { ast::required(ty_m) => { Some(pprust::fun_to_str( - ty_m.decl, + &ty_m.decl, ty_m.ident, &ty_m.generics, extract::interner() @@ -221,7 +221,7 @@ fn get_method_sig( } ast::provided(m) => { Some(pprust::fun_to_str( - m.decl, + &m.decl, m.ident, &m.generics, extract::interner() @@ -240,7 +240,7 @@ fn get_method_sig( }) { Some(method) => { Some(pprust::fun_to_str( - method.decl, + &method.decl, method.ident, &method.generics, extract::interner() diff --git a/src/librustpkg/util.rs b/src/librustpkg/util.rs index 64a6d9c50554a..fdabe86359a6b 100644 --- a/src/librustpkg/util.rs +++ b/src/librustpkg/util.rs @@ -76,7 +76,7 @@ struct ReadyCtx { fns: ~[ListenerFn] } -fn fold_mod(_ctx: @mut ReadyCtx, m: ast::_mod, +fn fold_mod(_ctx: 
@mut ReadyCtx, m: &ast::_mod, fold: fold::ast_fold) -> ast::_mod { fn strip_main(item: @ast::item) -> @ast::item { @ast::item { @@ -87,11 +87,11 @@ fn fold_mod(_ctx: @mut ReadyCtx, m: ast::_mod, } } - fold::noop_fold_mod(ast::_mod { - items: do vec::map(m.items) |item| { + fold::noop_fold_mod(&ast::_mod { + items: do m.items.map |item| { strip_main(*item) }, - .. m + .. copy *m }, fold) } @@ -199,7 +199,7 @@ pub fn ready_crate(sess: session::Session, let fold = fold::make_fold(precursor); - @fold.fold_crate(*crate) + @fold.fold_crate(crate) } pub fn parse_vers(vers: ~str) -> result::Result { diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index c6994125b2613..1053473a3a589 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -1190,7 +1190,7 @@ pub enum attr_style { attr_outer, attr_inner, } #[deriving_eq] pub struct attribute_ { style: attr_style, - value: meta_item, + value: @meta_item, is_sugared_doc: bool, } diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 40315d175cc4d..0627e063d976c 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -61,8 +61,8 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner) -> ~str { let strs = do p.map |e| { match *e { - path_mod(s) => *itr.get(s), - path_name(s) => *itr.get(s) + path_mod(s) => copy *itr.get(s), + path_name(s) => copy *itr.get(s) } }; str::connect(strs, sep) @@ -71,7 +71,7 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner) pub fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str { if vec::is_empty(p) { //FIXME /* FIXME (#2543) */ copy *i - *itr.get(i) + copy *itr.get(i) } else { fmt!("%s::%s", path_to_str(p, itr), *itr.get(i)) } @@ -83,8 +83,8 @@ pub fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str { pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str { match pe { - path_mod(s) => *itr.get(s), - path_name(s) => *itr.get(s) + path_mod(s) => copy *itr.get(s), + path_name(s) => copy *itr.get(s) } } @@ -119,7 +119,7 @@ pub struct Ctx { pub type vt = visit::vt<@mut Ctx>; pub fn extend(cx: @mut Ctx, +elt: ident) -> @path { - @(vec::append(cx.path, ~[path_name(elt)])) + @(vec::append(copy cx.path, ~[path_name(elt)])) } pub fn mk_ast_map_visitor() -> vt { @@ -161,7 +161,7 @@ pub fn map_decoded_item(diag: span_handler, // variables that are simultaneously in scope). 
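
The hunks above (regionck, vtable, writeback, loop_query, block_query, the rustdoc and rustpkg folds) all make the same change: helpers stop taking AST nodes such as `ast::blk` by value and borrow them as `&ast::blk` instead, so the caller never has to hand over or duplicate the whole structure. A minimal sketch of that idea in present-day Rust, using invented stand-in types (`Expr`, `Block`) rather than the real 2013 AST:

    struct Expr { id: u32 }
    struct Block { exprs: Vec<Expr> }

    // Before: taking the block by value means the caller gives up (or must
    // duplicate) the whole structure on every call.
    fn count_exprs_by_value(b: Block) -> usize {
        b.exprs.len()
    }

    // After: borrowing leaves ownership with the caller and copies nothing.
    fn count_exprs_by_ref(b: &Block) -> usize {
        b.exprs.len()
    }

    fn main() {
        let b = Block { exprs: vec![Expr { id: 1 }, Expr { id: 2 }] };
        assert_eq!(count_exprs_by_ref(&b), 2);
        assert_eq!(count_exprs_by_ref(&b), 2); // still usable: only borrowed
        assert_eq!(count_exprs_by_value(b), 2); // consumes `b`
    }
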
let cx = @mut Ctx { map: @map, - path: path, + path: copy path, local_id: 0, diag: diag, }; @@ -174,10 +174,10 @@ pub fn map_decoded_item(diag: span_handler, ii_item(*) | ii_dtor(*) => { /* fallthrough */ } ii_foreign(i) => { cx.map.insert(i.id, node_foreign_item(i, foreign_abi_rust_intrinsic, - @path)); + @/*bad*/ copy path)); } ii_method(impl_did, m) => { - map_method(impl_did, @path, m, cx); + map_method(impl_did, @/*bad*/ copy path, m, cx); } } @@ -185,36 +185,46 @@ pub fn map_decoded_item(diag: span_handler, ii.accept(cx, v); } -pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, - sp: codemap::span, id: node_id, &&cx: @mut Ctx, v: vt) { +pub fn map_fn( + fk: &visit::fn_kind, + decl: &fn_decl, + body: &blk, + sp: codemap::span, + id: node_id, + &&cx: @mut Ctx, + v: visit::vt<@mut Ctx> +) { for decl.inputs.each |a| { cx.map.insert(a.id, - node_arg(/* FIXME (#2543) */ - copy *a, cx.local_id)); + node_arg(/* FIXME (#2543) */ copy *a, cx.local_id)); cx.local_id += 1u; } - match fk { - visit::fk_dtor(ref tps, ref attrs, self_id, parent_id) => { + match *fk { + visit::fk_dtor(generics, ref attrs, self_id, parent_id) => { let dt = @spanned { node: ast::struct_dtor_ { id: id, - attrs: (*attrs), + attrs: /* FIXME (#2543) */ vec::from_slice(*attrs), self_id: self_id, - body: /* FIXME (#2543) */ copy body, + body: /* FIXME (#2543) */ copy *body, }, span: sp, }; - cx.map.insert(id, node_dtor(/* FIXME (#2543) */ copy *tps, dt, - parent_id, - @/* FIXME (#2543) */ copy cx.path)); + cx.map.insert( + id, + node_dtor( + /* FIXME (#2543) */ copy *generics, + dt, + parent_id, + @/* FIXME (#2543) */ copy cx.path)); } _ => () } visit::visit_fn(fk, decl, body, sp, id, cx, v); } -pub fn map_block(b: blk, &&cx: @mut Ctx, v: vt) { - cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy b)); +pub fn map_block(b: &blk, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { + cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy *b)); visit::visit_block(b, cx, v); } @@ -230,12 +240,12 @@ pub fn number_pat(cx: @mut Ctx, pat: @pat) { }; } -pub fn map_local(loc: @local, &&cx: @mut Ctx, v: vt) { +pub fn map_local(loc: @local, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { number_pat(cx, loc.node.pat); visit::visit_local(loc, cx, v); } -pub fn map_arm(arm: arm, &&cx: @mut Ctx, v: vt) { +pub fn map_arm(arm: &arm, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { number_pat(cx, arm.pats[0]); visit::visit_arm(arm, cx, v); } @@ -247,70 +257,85 @@ pub fn map_method(impl_did: def_id, impl_path: @path, cx.local_id += 1u; } -pub fn map_item(i: @item, &&cx: @mut Ctx, v: vt) { +pub fn map_item(i: @item, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { let item_path = @/* FIXME (#2543) */ copy cx.path; cx.map.insert(i.id, node_item(i, item_path)); match i.node { - item_impl(_, _, _, ms) => { - let impl_did = ast_util::local_def(i.id); - for ms.each |m| { - map_method(impl_did, extend(cx, i.ident), *m, cx); + item_impl(_, _, _, ref ms) => { + let impl_did = ast_util::local_def(i.id); + for ms.each |m| { + map_method(impl_did, extend(cx, i.ident), *m, cx); + } } - } - item_enum(ref enum_definition, _) => { - for (*enum_definition).variants.each |v| { - cx.map.insert(v.node.id, node_variant( - /* FIXME (#2543) */ copy *v, i, - extend(cx, i.ident))); + item_enum(ref enum_definition, _) => { + for (*enum_definition).variants.each |v| { + cx.map.insert(v.node.id, node_variant( + /* FIXME (#2543) */ copy *v, i, + extend(cx, i.ident))); + } } - } - item_foreign_mod(nm) => { - let abi = match attr::foreign_abi(i.attrs) { - either::Left(ref msg) => 
cx.diag.span_fatal(i.span, (*msg)), - either::Right(abi) => abi - }; - for nm.items.each |nitem| { - cx.map.insert(nitem.id, - node_foreign_item(*nitem, abi, - /* FIXME (#2543) */ - if nm.sort == ast::named { - extend(cx, i.ident) - } - else { - /* Anonymous extern mods go - in the parent scope */ - @copy cx.path - })); + item_foreign_mod(ref nm) => { + let abi = match attr::foreign_abi(i.attrs) { + Left(ref msg) => cx.diag.span_fatal(i.span, (*msg)), + Right(abi) => abi + }; + for nm.items.each |nitem| { + cx.map.insert(nitem.id, + node_foreign_item( + *nitem, + abi, + // FIXME (#2543) + if nm.sort == ast::named { + extend(cx, i.ident) + } else { + // Anonymous extern mods go in the parent scope + @copy cx.path + } + ) + ); + } } - } - item_struct(struct_def, _) => { - map_struct_def(struct_def, node_item(i, item_path), i.ident, cx, - v); - } - item_trait(_, ref traits, ref methods) => { - for traits.each |p| { - cx.map.insert(p.ref_id, node_item(i, item_path)); + item_struct(struct_def, _) => { + map_struct_def( + struct_def, + node_item(i, item_path), + i.ident, + cx, + v + ); } - for (*methods).each |tm| { - let id = ast_util::trait_method_to_ty_method(*tm).id; - let d_id = ast_util::local_def(i.id); - cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); + item_trait(_, ref traits, ref methods) => { + for traits.each |p| { + cx.map.insert(p.ref_id, node_item(i, item_path)); + } + for methods.each |tm| { + let id = ast_util::trait_method_to_ty_method(tm).id; + let d_id = ast_util::local_def(i.id); + cx.map.insert( + id, + node_trait_method(@copy *tm, d_id, item_path) + ); + } } - } - _ => () + _ => () } match i.node { - item_mod(_) | item_foreign_mod(_) => { - cx.path.push(path_mod(i.ident)); - } - _ => cx.path.push(path_name(i.ident)) + item_mod(_) | item_foreign_mod(_) => { + cx.path.push(path_mod(i.ident)); + } + _ => cx.path.push(path_name(i.ident)) } visit::visit_item(i, cx, v); cx.path.pop(); } -pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node, - ident: ast::ident, cx: @mut Ctx, _v: vt) { +pub fn map_struct_def( + struct_def: @ast::struct_def, + parent_node: ast_node, + ident: ast::ident, + cx: @mut Ctx, + _v: visit::vt<@mut Ctx> +) { let p = extend(cx, ident); // If this is a tuple-like struct, register the constructor. 
match struct_def.ctor_id { @@ -327,12 +352,12 @@ pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node, } } -pub fn map_expr(ex: @expr, &&cx: @mut Ctx, v: vt) { +pub fn map_expr(ex: @expr, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { cx.map.insert(ex.id, node_expr(ex)); visit::visit_expr(ex, cx, v); } -pub fn map_stmt(stmt: @stmt, &&cx: @mut Ctx, v: vt) { +pub fn map_stmt(stmt: @stmt, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { cx.map.insert(stmt_id(*stmt), node_stmt(stmt)); visit::visit_stmt(stmt, cx, v); } @@ -366,14 +391,14 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { fmt!("method %s in %s (id=%?)", *itr.get(m.ident), path_to_str(*path, itr), id) } - Some(node_trait_method(tm, _, path)) => { - let m = ast_util::trait_method_to_ty_method(*tm); + Some(node_trait_method(ref tm, _, path)) => { + let m = ast_util::trait_method_to_ty_method(&**tm); fmt!("method %s in %s (id=%?)", *itr.get(m.ident), path_to_str(*path, itr), id) } Some(node_variant(ref variant, _, path)) => { fmt!("variant %s in %s (id=%?)", - *itr.get((*variant).node.name), path_to_str(*path, itr), id) + *itr.get(variant.node.name), path_to_str(*path, itr), id) } Some(node_expr(expr)) => { fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id) @@ -401,8 +426,8 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { } pub fn node_item_query(items: map, id: node_id, - query: fn(@item) -> Result, - error_msg: ~str) -> Result { + query: fn(@item) -> Result, + +error_msg: ~str) -> Result { match items.find(&id) { Some(node_item(it, _)) => query(it), _ => fail!(error_msg) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index ab14f6cc08621..7e0cd2640b2c4 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -28,7 +28,7 @@ use core::vec; pub pure fn path_name_i(idents: &[ident], intr: @token::ident_interner) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") - str::connect(idents.map(|i| *intr.get(*i)), ~"::") + str::connect(idents.map(|i| copy *intr.get(*i)), ~"::") } @@ -261,15 +261,15 @@ pub fn public_methods(ms: ~[@method]) -> ~[@method] { // extract a ty_method from a trait_method. 
if the trait_method is // a default, pull out the useful fields to make a ty_method -pub fn trait_method_to_ty_method(method: trait_method) -> ty_method { - match method { - required(ref m) => (*m), +pub fn trait_method_to_ty_method(method: &trait_method) -> ty_method { + match *method { + required(ref m) => copy *m, provided(ref m) => { ty_method { ident: m.ident, - attrs: m.attrs, + attrs: copy m.attrs, purity: m.purity, - decl: m.decl, + decl: copy m.decl, generics: copy m.generics, self_ty: m.self_ty, id: m.id, @@ -279,12 +279,12 @@ pub fn trait_method_to_ty_method(method: trait_method) -> ty_method { } } -pub fn split_trait_methods(trait_methods: ~[trait_method]) +pub fn split_trait_methods(trait_methods: &[trait_method]) -> (~[ty_method], ~[@method]) { let mut reqd = ~[], provd = ~[]; for trait_methods.each |trt_method| { match *trt_method { - required(ref tm) => reqd.push((*tm)), + required(ref tm) => reqd.push(copy *tm), provided(m) => provd.push(m) } }; @@ -411,8 +411,8 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_view_item: fn@(vi: @view_item) { match vi.node { view_item_extern_mod(_, _, id) => vfn(id), - view_item_use(vps) => { - for vec::each(vps) |vp| { + view_item_use(ref vps) => { + for vps.each |vp| { match vp.node { view_path_simple(_, _, _, id) => vfn(id), view_path_glob(_, id) => vfn(id), @@ -440,7 +440,7 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { vfn(l.node.id); }, - visit_block: fn@(b: blk) { + visit_block: fn@(b: &blk) { vfn(b.node.id); }, @@ -448,7 +448,7 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { vfn(ast_util::stmt_id(*s)); }, - visit_arm: fn@(_a: arm) { }, + visit_arm: fn@(_a: &arm) { }, visit_pat: fn@(p: @pat) { vfn(p.id) @@ -474,21 +474,21 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_generics: visit_generics, - visit_fn: fn@(fk: visit::fn_kind, d: ast::fn_decl, - _b: ast::blk, _sp: span, id: ast::node_id) { + visit_fn: fn@(fk: &visit::fn_kind, d: &ast::fn_decl, + _b: &ast::blk, _sp: span, id: ast::node_id) { vfn(id); - match fk { - visit::fk_dtor(ref generics, _, self_id, parent_id) => { + match *fk { + visit::fk_dtor(generics, _, self_id, parent_id) => { visit_generics(generics); vfn(id); vfn(self_id); vfn(parent_id.node); } - visit::fk_item_fn(_, ref generics, _) => { + visit::fk_item_fn(_, generics, _) => { visit_generics(generics); } - visit::fk_method(_, ref generics, m) => { + visit::fk_method(_, generics, m) => { vfn(m.self_id); visit_generics(generics); } @@ -502,10 +502,10 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { } }, - visit_ty_method: fn@(_ty_m: ty_method) { + visit_ty_method: fn@(_ty_m: &ty_method) { }, - visit_trait_method: fn@(_ty_m: trait_method) { + visit_trait_method: fn@(_ty_m: &trait_method) { }, visit_struct_def: fn@(_sd: @struct_def, @@ -552,12 +552,12 @@ pub fn walk_pat(pat: @pat, it: fn(@pat)) { it(pat); match pat.node { pat_ident(_, _, Some(p)) => walk_pat(p, it), - pat_rec(fields, _) | pat_struct(_, fields, _) => { + pat_rec(ref fields, _) | pat_struct(_, ref fields, _) => { for fields.each |f| { walk_pat(f.pat, it) } } - pat_enum(_, Some(s)) | pat_tup(s) => { + pat_enum(_, Some(ref s)) | pat_tup(ref s) => { for s.each |p| { walk_pat(*p, it) } @@ -565,17 +565,16 @@ pub fn walk_pat(pat: @pat, it: fn(@pat)) { pat_box(s) | pat_uniq(s) | pat_region(s) => { walk_pat(s, it) } - pat_vec(elts, tail) => { + pat_vec(ref elts, ref tail) => { for elts.each |p| { walk_pat(*p, it) } - do option::iter(&tail) |tail| { + do tail.iter |tail| { walk_pat(*tail, it) } } 
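
The ast_util hunks above replace by-value enum parameters with borrows and make every remaining duplication explicit, as in `match *method { required(ref m) => copy *m, ... }` and the `ref` bindings added to `walk_pat`. The same shape still works in present-day Rust; this sketch uses simplified, invented types (`TraitMethod`, `method_name`) in place of the real AST:

    enum TraitMethod {
        Required(String),          // just a name, for illustration
        Provided(String, Vec<u8>), // name plus a body
    }

    // Borrow the enum, bind fields by reference, and clone only where an
    // owned value is genuinely needed -- the explicit analogue of `copy`.
    fn method_name(m: &TraitMethod) -> String {
        match *m {
            TraitMethod::Required(ref name) => name.clone(),
            TraitMethod::Provided(ref name, _) => name.clone(),
        }
    }

    fn main() {
        let m = TraitMethod::Provided("len".to_string(), vec![1, 2, 3]);
        assert_eq!(method_name(&m), "len");
        // `m` is still owned here: the match above only borrowed it.
        drop(m);
    }
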
pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, _, _) | - pat_enum(_, _) => { - } + pat_enum(_, _) => { } } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 14ffb1cab5da4..e744d19b96544 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -51,16 +51,17 @@ pub fn mk_word_item(name: @~str) -> @ast::meta_item { pub fn mk_attr(item: @ast::meta_item) -> ast::attribute { dummy_spanned(ast::attribute_ { style: ast::attr_inner, - value: *item, + value: item, is_sugared_doc: false }) } -pub fn mk_sugared_doc_attr(text: ~str, +pub fn mk_sugared_doc_attr(+text: ~str, +lo: BytePos, +hi: BytePos) -> ast::attribute { + let style = doc_comment_style(text); let lit = spanned(lo, hi, ast::lit_str(@text)); let attr = ast::attribute_ { - style: doc_comment_style(text), - value: spanned(lo, hi, ast::meta_name_value(@~"doc", lit)), + style: style, + value: @spanned(lo, hi, ast::meta_name_value(@~"doc", lit)), is_sugared_doc: true }; spanned(lo, hi, attr) @@ -69,7 +70,7 @@ pub fn mk_sugared_doc_attr(text: ~str, /* Conversion */ pub fn attr_meta(attr: ast::attribute) -> @ast::meta_item { - @attr.node.value + attr.node.value } // Get the meta_items from inside a vector of attributes @@ -79,7 +80,7 @@ pub fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] { pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute { if attr.node.is_sugared_doc { - let comment = get_meta_item_value_str(@attr.node.value).get(); + let comment = get_meta_item_value_str(attr.node.value).get(); let meta = mk_name_value_item_str(@~"doc", @strip_doc_comment_decoration(*comment)); mk_attr(meta) @@ -91,7 +92,7 @@ pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute { /* Accessors */ pub pure fn get_attr_name(attr: &ast::attribute) -> @~str { - get_meta_item_name(@attr.node.value) + get_meta_item_name(attr.node.value) } pub pure fn get_meta_item_name(meta: @ast::meta_item) -> @~str { @@ -122,8 +123,8 @@ pub fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<@~str> { pub fn get_meta_item_list(meta: @ast::meta_item) -> Option<~[@ast::meta_item]> { match meta.node { - ast::meta_list(_, l) => option::Some(/* FIXME (#2543) */ copy l), - _ => option::None + ast::meta_list(_, ref l) => Some(/* FIXME (#2543) */ copy *l), + _ => None } } @@ -182,33 +183,33 @@ pub fn contains(haystack: &[@ast::meta_item], } fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { - return match a.node { - ast::meta_word(ref na) => match b.node { + match a.node { + ast::meta_word(ref na) => match b.node { ast::meta_word(ref nb) => (*na) == (*nb), _ => false - }, - ast::meta_name_value(ref na, va) => match b.node { + }, + ast::meta_name_value(ref na, va) => match b.node { ast::meta_name_value(ref nb, vb) => { (*na) == (*nb) && va.node == vb.node } _ => false - }, - ast::meta_list(ref na, misa) => match b.node { - ast::meta_list(ref nb, misb) => { + }, + ast::meta_list(ref na, ref misa) => match b.node { + ast::meta_list(ref nb, ref misb) => { if na != nb { return false; } - for misa.each |&mi| { - if !contains(misb, mi) { return false; } + for misa.each |mi| { + if !misb.contains(mi) { return false; } } true } _ => false - } } + } } pub fn contains_name(metas: &[@ast::meta_item], name: &str) -> bool { let matches = find_meta_items_by_name(metas, name); - return vec::len(matches) > 0u; + matches.len() > 0u } pub fn attrs_contains_name(attrs: &[ast::attribute], name: &str) -> bool { @@ -226,14 +227,14 @@ pub fn first_attr_value_str_by_name(attrs: ~[ast::attribute], name: &str) } } -fn 
last_meta_item_by_name(items: ~[@ast::meta_item], name: &str) +fn last_meta_item_by_name(items: &[@ast::meta_item], name: &str) -> Option<@ast::meta_item> { let items = attr::find_meta_items_by_name(items, name); vec::last_opt(items) } -pub fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: &str) +pub fn last_meta_item_value_str_by_name(items: &[@ast::meta_item], name: &str) -> Option<@~str> { match last_meta_item_by_name(items, name) { @@ -259,21 +260,23 @@ pub fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: &str) /* Higher-level applications */ -pub fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] { +pub fn sort_meta_items(items: &[@ast::meta_item]) -> ~[@ast::meta_item] { // This is sort of stupid here, converting to a vec of mutables and back - let mut v = items; + let mut v = vec::from_slice(items); do std::sort::quick_sort(v) |ma, mb| { get_meta_item_name(*ma) <= get_meta_item_name(*mb) } // There doesn't seem to be a more optimal way to do this - do v.map |&m| { + do v.map |m| { match m.node { - ast::meta_list(n, mis) => @spanned { - node: ast::meta_list(n, sort_meta_items(mis)), - .. *m - }, - _ => m + ast::meta_list(n, ref mis) => { + @spanned { + node: ast::meta_list(n, sort_meta_items(*mis)), + .. /*bad*/ copy **m + } + } + _ => /*bad*/ copy *m } } } @@ -297,7 +300,7 @@ pub fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: &str) -> pub fn find_linkage_metas(attrs: &[ast::attribute]) -> ~[@ast::meta_item] { do find_attrs_by_name(attrs, ~"link").flat_map |attr| { match attr.node.value.node { - ast::meta_list(_, items) => /* FIXME (#2543) */ copy items, + ast::meta_list(_, ref items) => /* FIXME (#2543) */ copy *items, _ => ~[] } } @@ -344,11 +347,10 @@ pub fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr { do vec::foldl(ia_none, attrs) |ia,attr| { match attr.node.value.node { ast::meta_word(@~"inline") => ia_hint, - ast::meta_list(@~"inline", items) => { - if !vec::is_empty(find_meta_items_by_name(items, ~"always")) { + ast::meta_list(@~"inline", ref items) => { + if !find_meta_items_by_name(*items, ~"always").is_empty() { ia_always - } else if !vec::is_empty( - find_meta_items_by_name(items, ~"never")) { + } else if !find_meta_items_by_name(*items, ~"never").is_empty() { ia_never } else { ia_hint diff --git a/src/libsyntax/ext/auto_encode.rs b/src/libsyntax/ext/auto_encode.rs index b00fa9d0a1948..0e2f3c2c8562f 100644 --- a/src/libsyntax/ext/auto_encode.rs +++ b/src/libsyntax/ext/auto_encode.rs @@ -114,7 +114,7 @@ mod syntax { pub fn expand_auto_encode( cx: ext_ctxt, span: span, - _mitem: ast::meta_item, + _mitem: @ast::meta_item, in_items: ~[@ast::item] ) -> ~[@ast::item] { fn is_auto_encode(a: &ast::attribute) -> bool { @@ -124,7 +124,7 @@ pub fn expand_auto_encode( fn filter_attrs(item: @ast::item) -> @ast::item { @ast::item { attrs: item.attrs.filtered(|a| !is_auto_encode(a)), - .. *item + .. copy *item } } @@ -147,7 +147,7 @@ pub fn expand_auto_encode( cx, item.span, item.ident, - *enum_def, + copy *enum_def, generics ); @@ -169,7 +169,7 @@ pub fn expand_auto_encode( pub fn expand_auto_decode( cx: ext_ctxt, span: span, - _mitem: ast::meta_item, + _mitem: @ast::meta_item, in_items: ~[@ast::item] ) -> ~[@ast::item] { fn is_auto_decode(a: &ast::attribute) -> bool { @@ -179,7 +179,7 @@ pub fn expand_auto_decode( fn filter_attrs(item: @ast::item) -> @ast::item { @ast::item { attrs: item.attrs.filtered(|a| !is_auto_decode(a)), - .. *item + .. 
copy *item } } @@ -202,7 +202,7 @@ pub fn expand_auto_decode( cx, item.span, item.ident, - *enum_def, + copy *enum_def, generics ); @@ -241,7 +241,7 @@ priv impl ext_ctxt { } } - fn expr(span: span, node: ast::expr_) -> @ast::expr { + fn expr(span: span, +node: ast::expr_) -> @ast::expr { @ast::expr { id: self.next_id(), callee_id: self.next_id(), @@ -250,7 +250,7 @@ priv impl ext_ctxt { } } - fn path(span: span, strs: ~[ast::ident]) -> @ast::path { + fn path(span: span, +strs: ~[ast::ident]) -> @ast::path { @ast::path { span: span, global: false, @@ -260,7 +260,7 @@ priv impl ext_ctxt { } } - fn path_global(span: span, strs: ~[ast::ident]) -> @ast::path { + fn path_global(span: span, +strs: ~[ast::ident]) -> @ast::path { @ast::path { span: span, global: true, @@ -270,8 +270,11 @@ priv impl ext_ctxt { } } - fn path_tps(span: span, strs: ~[ast::ident], - tps: ~[@ast::Ty]) -> @ast::path { + fn path_tps( + span: span, + +strs: ~[ast::ident], + +tps: ~[@ast::Ty] + ) -> @ast::path { @ast::path { span: span, global: false, @@ -281,8 +284,11 @@ priv impl ext_ctxt { } } - fn path_tps_global(span: span, strs: ~[ast::ident], - tps: ~[@ast::Ty]) -> @ast::path { + fn path_tps_global( + span: span, + +strs: ~[ast::ident], + +tps: ~[@ast::Ty] + ) -> @ast::path { @ast::path { span: span, global: true, @@ -292,8 +298,11 @@ priv impl ext_ctxt { } } - fn ty_path(span: span, strs: ~[ast::ident], - tps: ~[@ast::Ty]) -> @ast::Ty { + fn ty_path( + span: span, + +strs: ~[ast::ident], + +tps: ~[@ast::Ty] + ) -> @ast::Ty { @ast::Ty { id: self.next_id(), node: ast::ty_path( @@ -339,13 +348,13 @@ priv impl ext_ctxt { span: span})) } - fn lambda(blk: ast::blk) -> @ast::expr { + fn lambda(+blk: ast::blk) -> @ast::expr { let ext_cx = self; - let blk_e = self.expr(blk.span, ast::expr_block(blk)); + let blk_e = self.expr(copy blk.span, ast::expr_block(copy blk)); quote_expr!( || $blk_e ) } - fn blk(span: span, stmts: ~[@ast::stmt]) -> ast::blk { + fn blk(span: span, +stmts: ~[@ast::stmt]) -> ast::blk { codemap::spanned { node: ast::blk_ { view_items: ~[], @@ -371,15 +380,15 @@ priv impl ext_ctxt { } } - fn expr_path(span: span, strs: ~[ast::ident]) -> @ast::expr { + fn expr_path(span: span, +strs: ~[ast::ident]) -> @ast::expr { self.expr(span, ast::expr_path(self.path(span, strs))) } - fn expr_path_global(span: span, strs: ~[ast::ident]) -> @ast::expr { + fn expr_path_global(span: span, +strs: ~[ast::ident]) -> @ast::expr { self.expr(span, ast::expr_path(self.path_global(span, strs))) } - fn expr_var(span: span, var: ~str) -> @ast::expr { + fn expr_var(span: span, +var: ~str) -> @ast::expr { self.expr_path(span, ~[self.ident_of(var)]) } @@ -394,7 +403,7 @@ priv impl ext_ctxt { fn expr_call( span: span, expr: @ast::expr, - args: ~[@ast::expr] + +args: ~[@ast::expr] ) -> @ast::expr { self.expr(span, ast::expr_call(expr, args, ast::NoSugar)) } @@ -403,7 +412,7 @@ priv impl ext_ctxt { self.lambda(self.expr_blk(expr)) } - fn lambda_stmts(span: span, stmts: ~[@ast::stmt]) -> @ast::expr { + fn lambda_stmts(span: span, +stmts: ~[@ast::stmt]) -> @ast::expr { self.lambda(self.blk(span, stmts)) } } @@ -572,7 +581,7 @@ fn mk_deser_impl( fn mk_ser_method( cx: ext_ctxt, span: span, - ser_body: ast::blk + +ser_body: ast::blk ) -> @ast::method { let ty_s = @ast::Ty { id: cx.next_id(), @@ -636,7 +645,7 @@ fn mk_deser_method( cx: ext_ctxt, span: span, ty: @ast::Ty, - deser_body: ast::blk + +deser_body: ast::blk ) -> @ast::method { let ty_d = @ast::Ty { id: cx.next_id(), @@ -858,14 +867,14 @@ fn mk_enum_ser_impl( cx: ext_ctxt, span: span, 
ident: ast::ident, - enum_def: ast::enum_def, + +enum_def: ast::enum_def, generics: &ast::Generics ) -> @ast::item { let body = mk_enum_ser_body( cx, span, ident, - enum_def.variants + copy enum_def.variants ); mk_ser_impl(cx, span, ident, generics, body) @@ -875,7 +884,7 @@ fn mk_enum_deser_impl( cx: ext_ctxt, span: span, ident: ast::ident, - enum_def: ast::enum_def, + +enum_def: ast::enum_def, generics: &ast::Generics ) -> @ast::item { let body = mk_enum_deser_body( @@ -974,12 +983,18 @@ fn mk_enum_ser_body( cx: ext_ctxt, span: span, name: ast::ident, - variants: ~[ast::variant] + +variants: ~[ast::variant] ) -> @ast::expr { let arms = do variants.mapi |v_idx, variant| { match variant.node.kind { - ast::tuple_variant_kind(args) => - ser_variant(cx, span, variant.node.name, v_idx, args), + ast::tuple_variant_kind(ref args) => + ser_variant( + cx, + span, + variant.node.name, + v_idx, + /*bad*/ copy *args + ), ast::struct_variant_kind(*) => fail!(~"struct variants unimplemented"), ast::enum_variant_kind(*) => @@ -1059,7 +1074,7 @@ fn mk_enum_deser_body( ) -> @ast::expr { let mut arms = do variants.mapi |v_idx, variant| { let body = match variant.node.kind { - ast::tuple_variant_kind(args) => { + ast::tuple_variant_kind(ref args) => { if args.is_empty() { // for a nullary variant v, do "v" ext_cx.expr_path(span, ~[variant.node.name]) @@ -1069,7 +1084,7 @@ fn mk_enum_deser_body( ext_cx, span, variant.node.name, - args + copy *args ) } }, @@ -1092,7 +1107,7 @@ fn mk_enum_deser_body( } }; - let quoted_expr = quote_expr!( + let quoted_expr = copy quote_expr!( ::core::sys::begin_unwind(~"explicit failure", ~"empty", 1); ).node; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 9525369d33415..7d3c7cafa9525 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -38,7 +38,7 @@ pub struct MacroDef { } pub type ItemDecorator = - fn@(ext_ctxt, span, ast::meta_item, ~[@ast::item]) -> ~[@ast::item]; + fn@(ext_ctxt, span, @ast::meta_item, ~[@ast::item]) -> ~[@ast::item]; pub struct SyntaxExpanderTT { expander: SyntaxExpanderTTFun, @@ -46,7 +46,7 @@ pub struct SyntaxExpanderTT { } pub type SyntaxExpanderTTFun - = fn@(ext_ctxt, span, ~[ast::token_tree]) -> MacResult; + = fn@(ext_ctxt, span, &[ast::token_tree]) -> MacResult; pub struct SyntaxExpanderTTItem { expander: SyntaxExpanderTTItemFun, @@ -222,7 +222,7 @@ pub trait ext_ctxt { } pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, - cfg: ast::crate_cfg) -> ext_ctxt { + +cfg: ast::crate_cfg) -> ext_ctxt { struct CtxtRepr { parse_sess: @mut parse::ParseSess, cfg: ast::crate_cfg, @@ -233,7 +233,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, impl ext_ctxt for CtxtRepr { fn codemap(@mut self) -> @CodeMap { self.parse_sess.cm } fn parse_sess(@mut self) -> @mut parse::ParseSess { self.parse_sess } - fn cfg(@mut self) -> ast::crate_cfg { self.cfg } + fn cfg(@mut self) -> ast::crate_cfg { copy self.cfg } fn call_site(@mut self) -> span { match *self.backtrace { Some(@ExpandedFrom(CallInfo {call_site: cs, _})) => cs, @@ -244,7 +244,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, fn backtrace(@mut self) -> Option<@ExpnInfo> { *self.backtrace } fn mod_push(@mut self, i: ast::ident) { self.mod_path.push(i); } fn mod_pop(@mut self) { self.mod_path.pop(); } - fn mod_path(@mut self) -> ~[ast::ident] { return self.mod_path; } + fn mod_path(@mut self) -> ~[ast::ident] { copy self.mod_path } fn bt_push(@mut self, ei: codemap::ExpnInfo) { match ei { ExpandedFrom(CallInfo {call_site: cs, callee: ref callee}) => { @@ 
-252,7 +252,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, Some(@ExpandedFrom(CallInfo { call_site: span {lo: cs.lo, hi: cs.hi, expn_info: *self.backtrace}, - callee: (*callee)})); + callee: copy *callee})); } } } @@ -299,12 +299,11 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, fn set_trace_macros(@mut self, x: bool) { self.trace_mac = x } - fn str_of(@mut self, id: ast::ident) -> ~str { - *self.parse_sess.interner.get(id) + copy *self.parse_sess.interner.get(id) } fn ident_of(@mut self, st: ~str) -> ast::ident { - self.parse_sess.interner.intern(@st) + self.parse_sess.interner.intern(@/*bad*/ copy st) } } let imp: @mut CtxtRepr = @mut CtxtRepr { @@ -320,7 +319,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str { match expr.node { ast::expr_lit(l) => match l.node { - ast::lit_str(s) => return *s, + ast::lit_str(s) => copy *s, _ => cx.span_fatal(l.span, err_msg) }, _ => cx.span_fatal(expr.span, err_msg) @@ -363,15 +362,15 @@ pub fn get_single_str_from_tts(cx: ext_ctxt, } } -pub fn get_exprs_from_tts(cx: ext_ctxt, tts: ~[ast::token_tree]) +pub fn get_exprs_from_tts(cx: ext_ctxt, tts: &[ast::token_tree]) -> ~[@ast::expr] { let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts); + vec::from_slice(tts)); let mut es = ~[]; while *p.token != token::EOF { if es.len() != 0 { - p.eat(token::COMMA); + p.eat(&token::COMMA); } es.push(p.parse_expr()); } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index fa21243df0367..c6e6e677348a1 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -29,7 +29,7 @@ pub struct Field { pub fn mk_expr(cx: ext_ctxt, sp: codemap::span, - expr: ast::expr_) + +expr: ast::expr_) -> @ast::expr { @ast::expr { id: cx.next_id(), @@ -65,7 +65,7 @@ pub fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr) cx.next_id(); // see ast_util::op_expr_callee_id mk_expr(cx, sp, ast::expr_unary(op, e)) } -pub fn mk_raw_path(sp: span, idents: ~[ast::ident]) -> @ast::path { +pub fn mk_raw_path(sp: span, +idents: ~[ast::ident]) -> @ast::path { let p = @ast::path { span: sp, global: false, idents: idents, @@ -74,7 +74,7 @@ pub fn mk_raw_path(sp: span, idents: ~[ast::ident]) -> @ast::path { return p; } pub fn mk_raw_path_(sp: span, - idents: ~[ast::ident], + +idents: ~[ast::ident], +types: ~[@ast::Ty]) -> @ast::path { @ast::path { span: sp, @@ -83,17 +83,17 @@ pub fn mk_raw_path_(sp: span, rp: None, types: types } } -pub fn mk_raw_path_global(sp: span, idents: ~[ast::ident]) -> @ast::path { +pub fn mk_raw_path_global(sp: span, +idents: ~[ast::ident]) -> @ast::path { @ast::path { span: sp, global: true, idents: idents, rp: None, types: ~[] } } -pub fn mk_path(cx: ext_ctxt, sp: span, idents: ~[ast::ident]) -> @ast::expr { +pub fn mk_path(cx: ext_ctxt, sp: span, +idents: ~[ast::ident]) -> @ast::expr { mk_expr(cx, sp, ast::expr_path(mk_raw_path(sp, idents))) } -pub fn mk_path_global(cx: ext_ctxt, sp: span, idents: ~[ast::ident]) +pub fn mk_path_global(cx: ext_ctxt, sp: span, +idents: ~[ast::ident]) -> @ast::expr { mk_expr(cx, sp, ast::expr_path(mk_raw_path_global(sp, idents))) } @@ -101,7 +101,7 @@ pub fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident) -> @ast::expr { mk_expr(cx, sp, ast::expr_field(p, m, ~[])) } -pub fn mk_access(cx: ext_ctxt, sp: span, p: ~[ast::ident], m: ast::ident) +pub fn mk_access(cx: ext_ctxt, sp: span, +p: ~[ast::ident], m: ast::ident) -> @ast::expr { let pathexpr = mk_path(cx, sp, p); return 
mk_access_(cx, sp, pathexpr, m); @@ -110,21 +110,21 @@ pub fn mk_addr_of(cx: ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr { return mk_expr(cx, sp, ast::expr_addr_of(ast::m_imm, e)); } pub fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr, - args: ~[@ast::expr]) -> @ast::expr { + +args: ~[@ast::expr]) -> @ast::expr { mk_expr(cx, sp, ast::expr_call(fn_expr, args, ast::NoSugar)) } -pub fn mk_call(cx: ext_ctxt, sp: span, fn_path: ~[ast::ident], - args: ~[@ast::expr]) -> @ast::expr { +pub fn mk_call(cx: ext_ctxt, sp: span, +fn_path: ~[ast::ident], + +args: ~[@ast::expr]) -> @ast::expr { let pathexpr = mk_path(cx, sp, fn_path); return mk_call_(cx, sp, pathexpr, args); } -pub fn mk_call_global(cx: ext_ctxt, sp: span, fn_path: ~[ast::ident], - args: ~[@ast::expr]) -> @ast::expr { +pub fn mk_call_global(cx: ext_ctxt, sp: span, +fn_path: ~[ast::ident], + +args: ~[@ast::expr]) -> @ast::expr { let pathexpr = mk_path_global(cx, sp, fn_path); return mk_call_(cx, sp, pathexpr, args); } // e = expr, t = type -pub fn mk_base_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_base_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { let vecexpr = ast::expr_vec(exprs, ast::m_imm); mk_expr(cx, sp, vecexpr) @@ -134,25 +134,25 @@ pub fn mk_vstore_e(cx: ext_ctxt, sp: span, expr: @ast::expr, @ast::expr { mk_expr(cx, sp, ast::expr_vstore(expr, vst)) } -pub fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::expr_vstore_uniq) } -pub fn mk_slice_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_slice_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::expr_vstore_slice) } -pub fn mk_fixed_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_fixed_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::expr_vstore_fixed(None)) } -pub fn mk_base_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr { +pub fn mk_base_str(cx: ext_ctxt, sp: span, +s: ~str) -> @ast::expr { let lit = ast::lit_str(@s); return mk_lit(cx, sp, lit); } -pub fn mk_uniq_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr { +pub fn mk_uniq_str(cx: ext_ctxt, sp: span, +s: ~str) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_str(cx, sp, s), ast::expr_vstore_uniq) } pub fn mk_field(sp: span, f: &Field) -> ast::field { @@ -164,28 +164,36 @@ pub fn mk_field(sp: span, f: &Field) -> ast::field { pub fn mk_fields(sp: span, fields: ~[Field]) -> ~[ast::field] { fields.map(|f| mk_field(sp, f)) } -pub fn mk_rec_e(cx: ext_ctxt, sp: span, fields: ~[Field]) -> @ast::expr { +pub fn mk_rec_e(cx: ext_ctxt, + sp: span, + +fields: ~[Field]) + -> @ast::expr { mk_expr(cx, sp, ast::expr_rec(mk_fields(sp, fields), option::None::<@ast::expr>)) } -pub fn mk_struct_e(cx: ext_ctxt, sp: span, ctor_path: ~[ast::ident], - fields: ~[Field]) -> @ast::expr { +pub fn mk_struct_e(cx: ext_ctxt, + sp: span, + +ctor_path: ~[ast::ident], + +fields: ~[Field]) + -> @ast::expr { mk_expr(cx, sp, ast::expr_struct(mk_raw_path(sp, ctor_path), mk_fields(sp, fields), option::None::<@ast::expr>)) } -pub fn mk_global_struct_e(cx: ext_ctxt, sp: span, - ctor_path: ~[ast::ident], - fields: ~[Field]) +pub fn mk_global_struct_e(cx: ext_ctxt, + sp: span, + +ctor_path: ~[ast::ident], + +fields: ~[Field]) -> @ast::expr { mk_expr(cx, sp, 
ast::expr_struct(mk_raw_path_global(sp, ctor_path), mk_fields(sp, fields), option::None::<@ast::expr>)) } -pub fn mk_glob_use(cx: ext_ctxt, sp: span, path: ~[ast::ident]) - -> @ast::view_item { +pub fn mk_glob_use(cx: ext_ctxt, + sp: span, + +path: ~[ast::ident]) -> @ast::view_item { let glob = @codemap::spanned { node: ast::view_path_glob(mk_raw_path(sp, path), cx.next_id()), span: sp, @@ -221,8 +229,8 @@ pub fn mk_local(cx: ext_ctxt, sp: span, mutbl: bool, @codemap::spanned { node: ast::stmt_decl(@decl, cx.next_id()), span: sp } } pub fn mk_block(cx: ext_ctxt, span: span, - view_items: ~[@ast::view_item], - stmts: ~[@ast::stmt], + +view_items: ~[@ast::view_item], + +stmts: ~[@ast::stmt], expr: Option<@ast::expr>) -> @ast::expr { let blk = codemap::spanned { node: ast::blk_ { @@ -316,7 +324,7 @@ pub fn mk_stmt(cx: ext_ctxt, span: span, expr: @ast::expr) -> @ast::stmt { } pub fn mk_ty_path(cx: ext_ctxt, span: span, - idents: ~[ ast::ident ]) + +idents: ~[ ast::ident ]) -> @ast::Ty { let ty = build::mk_raw_path(span, idents); let ty = ast::ty_path(ty, cx.next_id()); @@ -325,7 +333,7 @@ pub fn mk_ty_path(cx: ext_ctxt, } pub fn mk_ty_path_global(cx: ext_ctxt, span: span, - idents: ~[ ast::ident ]) + +idents: ~[ ast::ident ]) -> @ast::Ty { let ty = build::mk_raw_path_global(span, idents); let ty = ast::ty_path(ty, cx.next_id()); diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 1dc5350c452ff..4f53bf62efbab 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -16,7 +16,7 @@ use ext::base::*; use ext::base; use parse::token; -pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let mut res_str = ~""; for tts.eachi |i, e| { diff --git a/src/libsyntax/ext/deriving.rs b/src/libsyntax/ext/deriving.rs index 7820bb5e964fe..50047d2ce4166 100644 --- a/src/libsyntax/ext/deriving.rs +++ b/src/libsyntax/ext/deriving.rs @@ -62,7 +62,7 @@ type ExpandDerivingEnumDefFn = &fn(ext_ctxt, pub fn expand_deriving_eq(cx: ext_ctxt, span: span, - _mitem: meta_item, + _mitem: @meta_item, in_items: ~[@item]) -> ~[@item] { expand_deriving(cx, @@ -74,7 +74,7 @@ pub fn expand_deriving_eq(cx: ext_ctxt, pub fn expand_deriving_iter_bytes(cx: ext_ctxt, span: span, - _mitem: meta_item, + _mitem: @meta_item, in_items: ~[@item]) -> ~[@item] { expand_deriving(cx, @@ -490,8 +490,8 @@ fn call_substructure_iter_bytes_method(cx: ext_ctxt, fn variant_arg_count(cx: ext_ctxt, span: span, variant: &variant) -> uint { match variant.node.kind { - tuple_variant_kind(args) => args.len(), - struct_variant_kind(struct_def) => struct_def.fields.len(), + tuple_variant_kind(ref args) => args.len(), + struct_variant_kind(ref struct_def) => struct_def.fields.len(), enum_variant_kind(*) => { cx.span_bug(span, ~"variant_arg_count: enum variants deprecated") } @@ -856,7 +856,7 @@ fn expand_deriving_eq_struct_tuple_method(cx: ext_ctxt, let self_str = ~"self"; let other_str = ~"__other"; let type_path = build::mk_raw_path(span, ~[type_ident]); - let fields = struct_def.fields; + let fields = copy struct_def.fields; // Create comparison expression, comparing each of the fields let mut match_body = None; diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index ce87c2f1363d7..91a9de9c051e8 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -15,16 +15,15 @@ * interface. 
*/ +use prelude::*; + use ast; use codemap::span; use ext::base::*; use ext::base; use ext::build::mk_uniq_str; -use core::option; -use core::os; - -pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let var = get_single_str_from_tts(cx, sp, tts, "env!"); @@ -33,8 +32,8 @@ pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) // Option rather than just an maybe-empty string. let e = match os::getenv(var) { - option::None => mk_uniq_str(cx, sp, ~""), - option::Some(ref s) => mk_uniq_str(cx, sp, (*s)) + None => mk_uniq_str(cx, sp, ~""), + Some(ref s) => mk_uniq_str(cx, sp, copy *s) }; MRExpr(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 1a67a569845e6..e3408a47c9a40 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -27,64 +27,73 @@ use core::vec; use core::hashmap::LinearMap; pub fn expand_expr(extsbox: @mut SyntaxEnv, cx: ext_ctxt, - e: expr_, s: span, fld: ast_fold, - orig: fn@(expr_, span, ast_fold) -> (expr_, span)) + e: &expr_, s: span, fld: ast_fold, + orig: fn@(&expr_, span, ast_fold) -> (expr_, span)) -> (expr_, span) { - return match e { - // expr_mac should really be expr_ext or something; it's the - // entry-point for all syntax extensions. - expr_mac(ref mac) => { - + match *e { + // expr_mac should really be expr_ext or something; it's the + // entry-point for all syntax extensions. + expr_mac(ref mac) => { match (*mac).node { - - // Token-tree macros, these will be the only case when we're - // finished transitioning. - mac_invoc_tt(pth, ref tts) => { - assert (vec::len(pth.idents) == 1u); - /* using idents and token::special_idents would make the - the macro names be hygienic */ - let extname = cx.parse_sess().interner.get(pth.idents[0]); - // leaving explicit deref here to highlight unbox op: - match (*extsbox).find(&extname) { - None => { - cx.span_fatal(pth.span, - fmt!("macro undefined: '%s'", *extname)) - } - Some(@SE(NormalTT(SyntaxExpanderTT{expander: exp, - span: exp_sp}))) => { - cx.bt_push(ExpandedFrom(CallInfo{ - call_site: s, - callee: NameAndSpan { - name: *extname, span: exp_sp + // Token-tree macros, these will be the only case when we're + // finished transitioning. 
+ mac_invoc_tt(pth, ref tts) => { + assert (vec::len(pth.idents) == 1u); + /* using idents and token::special_idents would make the + the macro names be hygienic */ + let extname = cx.parse_sess().interner.get(pth.idents[0]); + // leaving explicit deref here to highlight unbox op: + match (*extsbox).find(&extname) { + None => { + cx.span_fatal( + pth.span, + fmt!("macro undefined: '%s'", *extname)) } - })); - - let expanded = match exp(cx, (*mac).span, (*tts)) { - MRExpr(e) => e, - MRAny(expr_maker,_,_) => expr_maker(), - _ => cx.span_fatal( - pth.span, fmt!("non-expr macro in expr pos: %s", - *extname)) - }; - - //keep going, outside-in - let fully_expanded = fld.fold_expr(expanded).node; - cx.bt_pop(); - - (fully_expanded, s) - } - _ => { - cx.span_fatal(pth.span, - fmt!("'%s' is not a tt-style macro", - *extname)) - } - + Some(@SE(NormalTT(SyntaxExpanderTT{ + expander: exp, + span: exp_sp + }))) => { + cx.bt_push(ExpandedFrom(CallInfo { + call_site: s, + callee: NameAndSpan { + name: copy *extname, + span: exp_sp, + }, + })); + + let expanded = match exp(cx, mac.span, *tts) { + MRExpr(e) => e, + MRAny(expr_maker,_,_) => expr_maker(), + _ => { + cx.span_fatal( + pth.span, + fmt!( + "non-expr macro in expr pos: %s", + *extname + ) + ) + } + }; + + //keep going, outside-in + let fully_expanded = + copy fld.fold_expr(expanded).node; + cx.bt_pop(); + + (fully_expanded, s) + } + _ => { + cx.span_fatal( + pth.span, + fmt!("'%s' is not a tt-style macro", *extname) + ) + } + } } - } } - } - _ => orig(e, s, fld) - }; + } + _ => orig(e, s, fld) + } } // This is a secondary mechanism for invoking syntax extensions on items: @@ -97,8 +106,8 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv, cx: ext_ctxt, // NB: there is some redundancy between this and expand_item, below, and // they might benefit from some amount of semantic and language-UI merger. pub fn expand_mod_items(extsbox: @mut SyntaxEnv, cx: ext_ctxt, - module_: ast::_mod, fld: ast_fold, - orig: fn@(ast::_mod, ast_fold) -> ast::_mod) + module_: &ast::_mod, fld: ast_fold, + orig: fn@(&ast::_mod, ast_fold) -> ast::_mod) -> ast::_mod { // Fold the contents first: let module_ = orig(module_, fld); @@ -146,8 +155,8 @@ macro_rules! with_exts_frame ( // When we enter a module, record it, for the sake of `module!` pub fn expand_item(extsbox: @mut SyntaxEnv, - cx: ext_ctxt, &&it: @ast::item, fld: ast_fold, - orig: fn@(&&v: @ast::item, ast_fold) -> Option<@ast::item>) + cx: ext_ctxt, it: @ast::item, fld: ast_fold, + orig: fn@(@ast::item, ast_fold) -> Option<@ast::item>) -> Option<@ast::item> { // need to do expansion first... it might turn out to be a module. 
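
At its core, the `expand_expr` rewrite above is a table lookup: take the macro's name, find its expander in the current syntax environment, report "macro undefined" if there is none, and otherwise run the expander and keep folding the result outside-in. A heavily reduced sketch of that dispatch in present-day Rust, with invented names (`Expander`, `expand_once`) and none of the backtrace bookkeeping:

    use std::collections::HashMap;

    // One expander per macro name: takes the raw token text, returns new text.
    type Expander = fn(&str) -> String;

    fn upper(tokens: &str) -> String {
        tokens.to_uppercase()
    }

    fn expand_once(
        exts: &HashMap<&str, Expander>,
        name: &str,
        tokens: &str,
    ) -> Result<String, String> {
        match exts.get(name) {
            None => Err(format!("macro undefined: '{}'", name)),
            Some(exp) => Ok(exp(tokens)), // the real code re-folds this result
        }
    }

    fn main() {
        let mut exts: HashMap<&str, Expander> = HashMap::new();
        exts.insert("upper", upper);
        assert_eq!(expand_once(&exts, "upper", "abc").unwrap(), "ABC");
        assert!(expand_once(&exts, "missing", "x").is_err());
    }
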
let maybe_it = match it.node { @@ -225,7 +234,7 @@ pub fn expand_item_mac(+extsbox: @mut SyntaxEnv, let (pth, tts) = match it.node { item_mac(codemap::spanned { node: mac_invoc_tt(pth, ref tts), _}) => { - (pth, (*tts)) + (pth, copy *tts) } _ => cx.span_bug(it.span, ~"invalid item macro invocation") }; @@ -245,8 +254,8 @@ pub fn expand_item_mac(+extsbox: @mut SyntaxEnv, cx.bt_push(ExpandedFrom(CallInfo { call_site: it.span, callee: NameAndSpan { - name: *extname, - span: (*expand).span + name: copy *extname, + span: expand.span } })); ((*expand).expander)(cx, it.span, tts) @@ -260,8 +269,8 @@ pub fn expand_item_mac(+extsbox: @mut SyntaxEnv, cx.bt_push(ExpandedFrom(CallInfo { call_site: it.span, callee: NameAndSpan { - name: *extname, - span: (*expand).span + name: copy *extname, + span: expand.span } })); ((*expand).expander)(cx, it.span, it.ident, tts) @@ -288,14 +297,16 @@ pub fn expand_item_mac(+extsbox: @mut SyntaxEnv, // expand a stmt pub fn expand_stmt(extsbox: @mut SyntaxEnv, cx: ext_ctxt, - && s: stmt_, sp: span, fld: ast_fold, - orig: fn@(&&s: stmt_, span, ast_fold) -> (stmt_, span)) + s: &stmt_, sp: span, fld: ast_fold, + orig: fn@(s: &stmt_, span, ast_fold) -> (stmt_, span)) -> (stmt_, span) { - let (mac, pth, tts, semi) = match s { + let (mac, pth, tts, semi) = match *s { stmt_mac(ref mac, semi) => { - match (*mac).node { - mac_invoc_tt(pth, ref tts) => ((*mac), pth, (*tts), semi) + match mac.node { + mac_invoc_tt(pth, ref tts) => { + (copy *mac, pth, copy *tts, semi) + } } } _ => return orig(s, sp, fld) @@ -311,7 +322,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv, cx: ext_ctxt, SyntaxExpanderTT{expander: exp, span: exp_sp}))) => { cx.bt_push(ExpandedFrom(CallInfo { call_site: sp, - callee: NameAndSpan { name: *extname, span: exp_sp } + callee: NameAndSpan { name: copy *extname, span: exp_sp } })); let expanded = match exp(cx, mac.span, tts) { MRExpr(e) => @@ -324,7 +335,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv, cx: ext_ctxt, }; //keep going, outside-in - let fully_expanded = fld.fold_stmt(expanded).node; + let fully_expanded = copy fld.fold_stmt(expanded).node; cx.bt_pop(); (fully_expanded, sp) @@ -346,8 +357,8 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv, cx: ext_ctxt, pub fn expand_block(extsbox: @mut SyntaxEnv, cx: ext_ctxt, - && blk: blk_, sp: span, fld: ast_fold, - orig: fn@(&&s: blk_, span, ast_fold) -> (blk_, span)) + blk: &blk_, sp: span, fld: ast_fold, + orig: fn@(&blk_, span, ast_fold) -> (blk_, span)) -> (blk_, span) { match (*extsbox).find(&@~" block") { // no scope limit on macros in this block, no need @@ -433,7 +444,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess, // every method/element of AstFoldFns in fold.rs. let extsbox = @mut syntax_expander_table(); let afp = default_ast_fold(); - let cx: ext_ctxt = mk_ctxt(parse_sess, cfg); + let cx: ext_ctxt = mk_ctxt(parse_sess, copy cfg); let f_pre = @AstFoldFns { fold_expr: |expr,span,recur| expand_expr(extsbox, cx, expr, span, recur, afp.fold_expr), @@ -444,23 +455,30 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess, fold_stmt: |stmt,span,recur| expand_stmt(extsbox, cx, stmt, span, recur, afp.fold_stmt), fold_block: |blk,span,recur| - expand_block (extsbox, cx, blk, span, recur, afp.fold_block), + expand_block(extsbox, cx, blk, span, recur, afp.fold_block), new_span: |a| new_span(cx, a), .. *afp}; let f = make_fold(f_pre); // add a bunch of macros as though they were placed at the // head of the program (ick). 
- let attrs = ~[spanned {span:codemap::dummy_sp(), - node: attribute_ - {style:attr_outer, - value:spanned - {node:meta_word(@~"macro_escape"), - span:codemap::dummy_sp()}, - is_sugared_doc:false}}]; + let attrs = ~[ + spanned { + span: codemap::dummy_sp(), + node: attribute_ { + style: attr_outer, + value: @spanned { + node: meta_word(@~"macro_escape"), + span: codemap::dummy_sp(), + }, + is_sugared_doc: false, + } + } + ]; let cm = match parse_item_from_source_str(~"", @core_macros(), - cfg,attrs, + copy cfg, + attrs, parse_sess) { Some(item) => item, None => cx.bug(~"expected core macros to parse correctly") @@ -469,8 +487,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess, // as it registers all the core macros as expanders. f.fold_item(cm); - let res = @f.fold_crate(*c); - return res; + @f.fold_crate(&*c) } #[cfg(test)] @@ -575,13 +592,17 @@ mod test { // make a "meta_word" outer attribute with the given name fn make_dummy_attr(s: @~str) -> ast::attribute { - spanned {span:codemap::dummy_sp(), - node: attribute_ - {style:attr_outer, - value:spanned - {node:meta_word(s), - span:codemap::dummy_sp()}, - is_sugared_doc:false}} + spanned { + span:codemap::dummy_sp(), + node: attribute_ { + style: attr_outer, + value: @spanned { + node: meta_word(s), + span: codemap::dummy_sp(), + }, + is_sugared_doc: false, + } + } } } diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index 3861db28e1d93..e06e43f6287ce 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -26,9 +26,9 @@ use ext::build; use ext::build::*; use unstable::extfmt::ct::*; -pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { - let args = get_exprs_from_tts(cx, copy tts); + let args = get_exprs_from_tts(cx, tts); if args.len() == 0 { cx.span_fatal(sp, "fmt! takes at least 1 argument."); } @@ -277,9 +277,9 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, for pieces.each |pc| { match *pc { PieceString(ref s) => { - piece_exprs.push(mk_uniq_str(cx, fmt_sp, (*s))) + piece_exprs.push(mk_uniq_str(cx, fmt_sp, copy *s)) } - PieceConv(conv) => { + PieceConv(ref conv) => { n += 1u; if n >= nargs { cx.span_fatal(sp, @@ -287,9 +287,14 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, ~"for the given format string"); } debug!("Building conversion:"); - log_conv(conv); + log_conv(/*bad*/ copy *conv); let arg_expr = args[n]; - let c_expr = make_new_conv(cx, fmt_sp, conv, arg_expr); + let c_expr = make_new_conv( + cx, + fmt_sp, + /*bad*/ copy *conv, + arg_expr + ); piece_exprs.push(c_expr); } } diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 15ddc44e85d4f..368520acd2d14 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -8,24 +8,25 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
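
Several hunks in this patch (ast.rs, attr.rs, session.rs, expand.rs, and the pipes ast_builder) follow from one struct change: `attribute_.value` becomes an `@meta_item` instead of an inline `meta_item`, so attributes hand out a shared pointer rather than copying the meta item. A loose present-day analogue, with invented names and `Rc` standing in for `@`:

    use std::rc::Rc;

    #[derive(Debug)]
    enum MetaItem {
        Word(String),
        NameValue(String, String),
    }

    #[derive(Debug)]
    struct Attribute {
        value: Rc<MetaItem>, // shared, immutable; cloning is a refcount bump
        is_sugared_doc: bool,
    }

    // Mirrors `attr_meta` above: returning the payload no longer needs a copy.
    fn attr_meta(attr: &Attribute) -> Rc<MetaItem> {
        Rc::clone(&attr.value)
    }

    fn main() {
        let attr = Attribute {
            value: Rc::new(MetaItem::NameValue("doc".into(), "hi".into())),
            is_sugared_doc: true,
        };
        println!("{:?} (doc comment: {})", attr_meta(&attr), attr.is_sugared_doc);
    }
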
+use prelude::*; +use core::io::WriterUtil; + use ast; use codemap; use ext::base::*; use ext::base; use print; -use core::io; -use core::io::WriterUtil; -use core::option; - pub fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, - tt: ~[ast::token_tree]) + tt: &[ast::token_tree]) -> base::MacResult { cx.print_backtrace(); io::stdout().write_line( - print::pprust::tt_to_str(ast::tt_delim(tt),cx.parse_sess().interner)); + print::pprust::tt_to_str( + ast::tt_delim(vec::from_slice(tt)), + cx.parse_sess().interner)); //trivial expression MRExpr(@ast::expr { diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index 6e1406e364723..3b885b7a7b976 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -35,7 +35,7 @@ mod syntax { pub use parse; } -pub fn path(ids: ~[ident], span: span) -> @ast::path { +pub fn path(+ids: ~[ident], span: span) -> @ast::path { @ast::path { span: span, global: false, idents: ids, @@ -43,7 +43,7 @@ pub fn path(ids: ~[ident], span: span) -> @ast::path { types: ~[] } } -pub fn path_global(ids: ~[ident], span: span) -> @ast::path { +pub fn path_global(+ids: ~[ident], span: span) -> @ast::path { @ast::path { span: span, global: true, idents: ids, @@ -52,19 +52,23 @@ pub fn path_global(ids: ~[ident], span: span) -> @ast::path { } pub trait append_types { - fn add_ty(ty: @ast::Ty) -> @ast::path; - fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path; + fn add_ty(&self, ty: @ast::Ty) -> @ast::path; + fn add_tys(&self, +tys: ~[@ast::Ty]) -> @ast::path; } impl append_types for @ast::path { - fn add_ty(ty: @ast::Ty) -> @ast::path { - @ast::path { types: vec::append_one(self.types, ty), - .. *self} + fn add_ty(&self, ty: @ast::Ty) -> @ast::path { + @ast::path { + types: vec::append_one(copy self.types, ty), + .. copy **self + } } - fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path { - @ast::path { types: vec::append(self.types, tys), - .. *self} + fn add_tys(&self, +tys: ~[@ast::Ty]) -> @ast::path { + @ast::path { + types: vec::append(copy self.types, tys), + .. 
copy **self + } } } @@ -75,34 +79,49 @@ pub trait ext_ctxt_ast_builder { fn expr_block(&self, e: @ast::expr) -> ast::blk; fn fn_decl(&self, +inputs: ~[ast::arg], output: @ast::Ty) -> ast::fn_decl; fn item(&self, name: ident, span: span, +node: ast::item_) -> @ast::item; - fn item_fn_poly(&self, name: ident, + fn item_fn_poly(&self, + ame: ident, +inputs: ~[ast::arg], output: @ast::Ty, +generics: Generics, +body: ast::blk) -> @ast::item; - fn item_fn(&self, name: ident, + fn item_fn(&self, + name: ident, +inputs: ~[ast::arg], output: @ast::Ty, +body: ast::blk) -> @ast::item; - fn item_enum_poly(&self, name: ident, + fn item_enum_poly(&self, + name: ident, span: span, +enum_definition: ast::enum_def, +generics: Generics) -> @ast::item; - fn item_enum(&self, name: ident, span: span, + fn item_enum(&self, + name: ident, + span: span, +enum_definition: ast::enum_def) -> @ast::item; - fn item_struct_poly(&self, name: ident, span: span, - struct_def: ast::struct_def, + fn item_struct_poly(&self, + name: ident, + span: span, + +struct_def: ast::struct_def, +generics: Generics) -> @ast::item; - fn item_struct(&self, name: ident, span: span, - struct_def: ast::struct_def) -> @ast::item; - fn struct_expr(&self, path: @ast::path, - fields: ~[ast::field]) -> @ast::expr; - fn variant(&self, name: ident, span: span, + fn item_struct(&self, + name: ident, + span: span, + +struct_def: ast::struct_def) -> @ast::item; + fn struct_expr(&self, + path: @ast::path, + +fields: ~[ast::field]) -> @ast::expr; + fn variant(&self, + name: ident, + span: span, +tys: ~[@ast::Ty]) -> ast::variant; - fn item_mod(&self, name: ident, span: span, + fn item_mod(&self, + name: ident, + span: span, +items: ~[@ast::item]) -> @ast::item; fn ty_path_ast_builder(&self, path: @ast::path) -> @ast::Ty; - fn item_ty_poly(&self, name: ident, + fn item_ty_poly(&self, + name: ident, span: span, ty: @ast::Ty, +generics: Generics) -> @ast::item; @@ -114,7 +133,7 @@ pub trait ext_ctxt_ast_builder { fn block(&self, +stmts: ~[@ast::stmt], e: @ast::expr) -> ast::blk; fn stmt_let(&self, ident: ident, e: @ast::expr) -> @ast::stmt; fn stmt_expr(&self, e: @ast::expr) -> @ast::stmt; - fn block_expr(&self, b: ast::blk) -> @ast::expr; + fn block_expr(&self, +b: ast::blk) -> @ast::expr; fn ty_option(&self, ty: @ast::Ty) -> @ast::Ty; fn ty_infer(&self) -> @ast::Ty; fn ty_nil_ast_builder(&self) -> @ast::Ty; @@ -130,7 +149,7 @@ impl ext_ctxt_ast_builder for ext_ctxt { ], dummy_sp()).add_ty(ty)) } - fn block_expr(&self, b: ast::blk) -> @ast::expr { + fn block_expr(&self, +b: ast::blk) -> @ast::expr { @expr { id: self.next_id(), callee_id: self.next_id(), @@ -229,7 +248,7 @@ impl ext_ctxt_ast_builder for ext_ctxt { // Rust coding conventions let non_camel_case_attribute = respan(dummy_sp(), ast::attribute_ { style: ast::attr_outer, - value: respan(dummy_sp(), + value: @respan(dummy_sp(), ast::meta_list(@~"allow", ~[ @respan(dummy_sp(), ast::meta_word( @@ -259,12 +278,19 @@ impl ext_ctxt_ast_builder for ext_ctxt { body)) } - fn item_fn(&self, name: ident, + fn item_fn(&self, + name: ident, +inputs: ~[ast::arg], output: @ast::Ty, - +body: ast::blk) -> @ast::item { - self.item_fn_poly(name, inputs, output, - ast_util::empty_generics(), body) + +body: ast::blk + ) -> @ast::item { + self.item_fn_poly( + name, + inputs, + output, + ast_util::empty_generics(), + body + ) } fn item_enum_poly(&self, name: ident, span: span, @@ -279,20 +305,31 @@ impl ext_ctxt_ast_builder for ext_ctxt { ast_util::empty_generics()) } - fn item_struct(&self, name: ident, span: span, - 
struct_def: ast::struct_def) -> @ast::item { - self.item_struct_poly(name, span, struct_def, - ast_util::empty_generics()) + fn item_struct( + &self, name: ident, + span: span, + +struct_def: ast::struct_def + ) -> @ast::item { + self.item_struct_poly( + name, + span, + struct_def, + ast_util::empty_generics() + ) } - fn item_struct_poly(&self, name: ident, span: span, - struct_def: ast::struct_def, - +generics: Generics) -> @ast::item { + fn item_struct_poly( + &self, + name: ident, + span: span, + +struct_def: ast::struct_def, + +generics: Generics + ) -> @ast::item { self.item(name, span, ast::item_struct(@struct_def, generics)) } fn struct_expr(&self, path: @ast::path, - fields: ~[ast::field]) -> @ast::expr { + +fields: ~[ast::field]) -> @ast::expr { @ast::expr { id: self.next_id(), callee_id: self.next_id(), @@ -381,7 +418,10 @@ impl ext_ctxt_ast_builder for ext_ctxt { let new_params = do generics.ty_params.map |ty_param| { ast::TyParam { bounds: no_bounds, ..copy *ty_param } }; - Generics { ty_params: new_params, ..*generics } + Generics { + ty_params: new_params, + .. copy *generics + } } fn item_ty_poly(&self, name: ident, span: span, ty: @ast::Ty, diff --git a/src/libsyntax/ext/pipes/liveness.rs b/src/libsyntax/ext/pipes/liveness.rs index a7f01d7564859..c5bed32a24f2a 100644 --- a/src/libsyntax/ext/pipes/liveness.rs +++ b/src/libsyntax/ext/pipes/liveness.rs @@ -88,7 +88,7 @@ pub fn analyze(proto: protocol, _cx: ext_ctxt) { } if self_live.len() > 0 { - let states = str::connect(self_live.map(|s| s.name), ~" "); + let states = str::connect(self_live.map(|s| copy s.name), ~" "); debug!("protocol %s is unbounded due to loops involving: %s", proto.name, states); diff --git a/src/libsyntax/ext/pipes/mod.rs b/src/libsyntax/ext/pipes/mod.rs index 8b8e48bd5229b..c26bfd178b024 100644 --- a/src/libsyntax/ext/pipes/mod.rs +++ b/src/libsyntax/ext/pipes/mod.rs @@ -65,11 +65,11 @@ pub mod liveness; pub fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident, - tt: ~[ast::token_tree]) -> base::MacResult { + tt: ~[ast::token_tree]) -> base::MacResult { let sess = cx.parse_sess(); let cfg = cx.cfg(); let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic, - cx.parse_sess().interner, None, tt); + cx.parse_sess().interner, None, copy tt); let rdr = tt_rdr as reader; let rust_parser = Parser(sess, cfg, rdr.dup()); diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index a6b820cf3f908..a5ca24c67378c 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -20,31 +20,35 @@ use parse::token; use core::prelude::*; pub trait proto_parser { - fn parse_proto(&self, id: ~str) -> protocol; + fn parse_proto(&self, +id: ~str) -> protocol; fn parse_state(&self, proto: protocol); fn parse_message(&self, state: state); } impl proto_parser for parser::Parser { - fn parse_proto(&self, id: ~str) -> protocol { + fn parse_proto(&self, +id: ~str) -> protocol { let proto = protocol(id, *self.span); - self.parse_seq_to_before_end(token::EOF, SeqSep { - sep: None, - trailing_sep_allowed: false - }, |self| self.parse_state(proto)); + self.parse_seq_to_before_end( + &token::EOF, + SeqSep { + sep: None, + trailing_sep_allowed: false, + }, + |self| self.parse_state(proto) + ); return proto; } fn parse_state(&self, proto: protocol) { let id = self.parse_ident(); - let name = *self.interner.get(id); + let name = copy *self.interner.get(id); - self.expect(token::COLON); - let dir = match *self.token { - token::IDENT(n, _) => 
self.interner.get(n), - _ => fail!() + self.expect(&token::COLON); + let dir = match copy *self.token { + token::IDENT(n, _) => self.interner.get(n), + _ => fail!() }; self.bump(); let dir = match dir { @@ -63,35 +67,47 @@ impl proto_parser for parser::Parser { // parse the messages self.parse_unspanned_seq( - token::LBRACE, token::RBRACE, SeqSep { + &token::LBRACE, + &token::RBRACE, + SeqSep { sep: Some(token::COMMA), - trailing_sep_allowed: true - }, |self| self.parse_message(state)); + trailing_sep_allowed: true, + }, + |self| self.parse_message(state) + ); } fn parse_message(&self, state: state) { - let mname = *self.interner.get(self.parse_ident()); + let mname = copy *self.interner.get(self.parse_ident()); let args = if *self.token == token::LPAREN { - self.parse_unspanned_seq(token::LPAREN, - token::RPAREN, SeqSep { - sep: Some(token::COMMA), - trailing_sep_allowed: true - }, |p| p.parse_ty(false)) + self.parse_unspanned_seq( + &token::LPAREN, + &token::RPAREN, + SeqSep { + sep: Some(token::COMMA), + trailing_sep_allowed: true, + }, + |p| p.parse_ty(false) + ) } else { ~[] }; - self.expect(token::RARROW); + self.expect(&token::RARROW); let next = match *self.token { token::IDENT(_, _) => { - let name = *self.interner.get(self.parse_ident()); + let name = copy *self.interner.get(self.parse_ident()); let ntys = if *self.token == token::LT { - self.parse_unspanned_seq(token::LT, - token::GT, SeqSep { - sep: Some(token::COMMA), - trailing_sep_allowed: true - }, |p| p.parse_ty(false)) + self.parse_unspanned_seq( + &token::LT, + &token::GT, + SeqSep { + sep: Some(token::COMMA), + trailing_sep_allowed: true, + }, + |p| p.parse_ty(false) + ) } else { ~[] }; Some(next_state {state: name, tys: ntys}) diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 46f10cd52bbfc..968a0e5f77eb4 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -60,8 +60,7 @@ impl gen_send for message { let next = this.proto.get_state(next_state.state); assert next_state.tys.len() == next.generics.ty_params.len(); let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str())); - - let args_ast = (arg_names, *tys).map(|n, t| cx.arg(*n, *t)); + let args_ast = vec::map2(arg_names, *tys, |n, t| cx.arg(*n, *t)); let pipe_ty = cx.ty_path_ast_builder( path(~[this.data_name()], span) @@ -121,7 +120,7 @@ impl gen_send for message { let mut rty = cx.ty_path_ast_builder(path(~[next.data_name()], span) - .add_tys(next_state.tys)); + .add_tys(copy next_state.tys)); if try { rty = cx.ty_option(rty); } @@ -139,7 +138,7 @@ impl gen_send for message { debug!("pipec: no next state"); let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str())); - let args_ast = do (arg_names, *tys).map |n, t| { + let args_ast = do vec::map2(arg_names, *tys) |n, t| { cx.arg(cx.ident_of(*n), *t) }; @@ -155,7 +154,7 @@ impl gen_send for message { ~"" } else { - ~"(" + str::connect(arg_names.map(|x| *x), + ~"(" + str::connect(arg_names.map(|x| copy *x), ~", ") + ~")" }; @@ -212,7 +211,7 @@ impl to_type_decls for state { let mut items_msg = ~[]; for self.messages.each |m| { - let message(name, span, tys, this, next) = *m; + let message(name, span, tys, this, next) = copy *m; let tys = match next { Some(ref next_state) => { @@ -228,7 +227,7 @@ impl to_type_decls for state { cx.ty_path_ast_builder( path(~[cx.ident_of(dir), cx.ident_of(next_name)], span) - .add_tys(next_state.tys))) + .add_tys(copy next_state.tys))) } None => tys }; diff --git a/src/libsyntax/ext/pipes/proto.rs 
b/src/libsyntax/ext/pipes/proto.rs index 6873baf731fde..db5c6541e264d 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -50,7 +50,7 @@ pub struct message(~str, span, ~[@ast::Ty], state, Option<next_state>); pub impl message { fn name(&mut self) -> ~str { match *self { - message(ref id, _, _, _, _) => (*id) + message(ref id, _, _, _, _) => copy *id } } @@ -63,7 +63,7 @@ pub impl message { /// Return the type parameters actually used by this message fn get_generics(&self) -> ast::Generics { match *self { - message(_, _, _, this, _) => this.generics + message(_, _, _, this, _) => copy this.generics } } } @@ -82,8 +82,8 @@ pub struct state_ { } pub impl state_ { - fn add_message(@self, name: ~str, span: span, - +data: ~[@ast::Ty], next: Option<next_state>) { + fn add_message(@self, +name: ~str, span: span, + +data: ~[@ast::Ty], +next: Option<next_state>) { self.messages.push(message(name, span, data, self, next)); } @@ -120,11 +120,11 @@ pub impl state_ { pub type protocol = @mut protocol_; -pub fn protocol(name: ~str, +span: span) -> protocol { +pub fn protocol(+name: ~str, +span: span) -> protocol { @mut protocol_(name, span) } -pub fn protocol_(name: ~str, span: span) -> protocol_ { +pub fn protocol_(+name: ~str, span: span) -> protocol_ { protocol_ { name: name, span: span, @@ -174,7 +174,7 @@ pub impl protocol_ { } pub impl protocol { - fn add_state_poly(&self, name: ~str, ident: ast::ident, dir: direction, + fn add_state_poly(&self, +name: ~str, ident: ast::ident, dir: direction, +generics: ast::Generics) -> state { let messages = @mut ~[]; diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 757302c78fc1a..4349ffaea4301 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -221,19 +221,19 @@ pub mod rt { pub fn expand_quote_tokens(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { base::MRExpr(expand_tts(cx, sp, tts)) } pub fn expand_quote_expr(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { base::MRExpr(expand_parse_call(cx, sp, ~"parse_expr", ~[], tts)) } pub fn expand_quote_item(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]); base::MRExpr(expand_parse_call(cx, sp, ~"parse_item", ~[e_attrs], tts)) @@ -241,7 +241,7 @@ pub fn expand_quote_item(cx: ext_ctxt, pub fn expand_quote_pat(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_refutable = build::mk_lit(cx, sp, ast::lit_bool(true)); base::MRExpr(expand_parse_call(cx, sp, ~"parse_pat", ~[e_refutable], tts)) @@ -249,7 +249,7 @@ pub fn expand_quote_pat(cx: ext_ctxt, pub fn expand_quote_ty(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_param_colons = build::mk_lit(cx, sp, ast::lit_bool(false)); base::MRExpr(expand_parse_call(cx, sp, ~"parse_ty", ~[e_param_colons], tts)) @@ -257,17 +257,17 @@ pub fn expand_quote_ty(cx: ext_ctxt, pub fn expand_quote_stmt(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]); base::MRExpr(expand_parse_call(cx, sp, ~"parse_stmt", ~[e_attrs], tts)) } fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] { - strs.map(|str|
cx.parse_sess().interner.intern(@*str)) + strs.map(|str| cx.parse_sess().interner.intern(@copy *str)) } -fn id_ext(cx: ext_ctxt, str: ~str) -> ast::ident { +fn id_ext(cx: ext_ctxt, +str: ~str) -> ast::ident { cx.parse_sess().interner.intern(@str) } @@ -508,7 +508,7 @@ fn mk_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) fn expand_tts(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> @ast::expr { + tts: &[ast::token_tree]) -> @ast::expr { // NB: It appears that the main parser loses its mind if we consider // $foo as a tt_nonterminal during the main parse, so we have to re-parse @@ -516,7 +516,11 @@ fn expand_tts(cx: ext_ctxt, // it has to do with transition away from supporting old-style macros, so // try removing it when enough of them are gone. - let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts); + let p = parse::new_parser_from_tts( + cx.parse_sess(), + cx.cfg(), + vec::from_slice(tts) + ); *p.quote_depth += 1u; let tts = p.parse_all_token_trees(); p.abort_if_errors(); @@ -578,9 +582,9 @@ fn expand_tts(cx: ext_ctxt, fn expand_parse_call(cx: ext_ctxt, sp: span, - parse_method: ~str, - arg_exprs: ~[@ast::expr], - tts: ~[ast::token_tree]) -> @ast::expr { + +parse_method: ~str, + +arg_exprs: ~[@ast::expr], + tts: &[ast::token_tree]) -> @ast::expr { let tts_expr = expand_tts(cx, sp, tts); let cfg_call = || build::mk_call_( diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 5b870f07b60c4..296305bb62e17 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -29,7 +29,7 @@ use core::vec; // a given file into the current one. /* line!(): expands to the current line number */ -pub fn expand_line(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_line(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "line!"); @@ -40,7 +40,7 @@ pub fn expand_line(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) } /* col!(): expands to the current column number */ -pub fn expand_col(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_col(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "col!"); @@ -52,7 +52,7 @@ pub fn expand_col(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) /* file!(): expands to the current filename */ /* The filemap (`loc.file`) contains a bunch more information we could spit * out if we wanted. */ -pub fn expand_file(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_file(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "file!"); @@ -62,13 +62,13 @@ pub fn expand_file(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) base::MRExpr(mk_base_str(cx, topmost.call_site, filename)) } -pub fn expand_stringify(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_stringify(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let s = pprust::tts_to_str(tts, cx.parse_sess().interner); base::MRExpr(mk_base_str(cx, sp, s)) } -pub fn expand_mod(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_mod(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "module_path!"); base::MRExpr(mk_base_str(cx, sp, @@ -79,7 +79,7 @@ pub fn expand_mod(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) // include! : parse the given file as an expr // This is generally a bad idea because it's going to behave // unhygienically. 
-pub fn expand_include(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_include(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let file = get_single_str_from_tts(cx, sp, tts, "include!"); let p = parse::new_sub_parser_from_file( @@ -89,7 +89,7 @@ pub fn expand_include(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) } // include_str! : read the given file, insert it as a literal string expr -pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let file = get_single_str_from_tts(cx, sp, tts, "include_str!"); let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file))); @@ -103,7 +103,7 @@ pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) base::MRExpr(mk_base_str(cx, sp, result::unwrap(res))) } -pub fn expand_include_bin(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_include_bin(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let file = get_single_str_from_tts(cx, sp, tts, "include_bin!"); match io::read_whole_file(&res_rel_file(cx, sp, &Path(file))) { @@ -121,19 +121,27 @@ pub fn expand_include_bin(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) // recur along an ExpnInfo chain to find the original expression fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo { - let ExpandedFrom(CallInfo { call_site, _ }) = *expn_info; - match call_site.expn_info { - Some(next_expn_info) => { - let ExpandedFrom(CallInfo { - callee: NameAndSpan {name, _}, - _ - }) = *next_expn_info; - // Don't recurse into file using "include!" - if name == ~"include" { return expn_info; } - - topmost_expn_info(next_expn_info) - }, - None => expn_info + match *expn_info { + ExpandedFrom(CallInfo { call_site: ref call_site, _ }) => { + match call_site.expn_info { + Some(next_expn_info) => { + match *next_expn_info { + ExpandedFrom(CallInfo { + callee: NameAndSpan { name: ref name, _ }, + _ + }) => { + // Don't recurse into file using "include!" + if *name == ~"include" { + expn_info + } else { + topmost_expn_info(next_expn_info) + } + } + } + }, + None => expn_info + } + } } } diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index f662f0337bfbc..bb6d656d5cc12 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+use prelude::*; + use ast::tt_delim; use ast; use codemap::span; @@ -16,20 +18,26 @@ use ext::base; use parse::lexer::{new_tt_reader, reader}; use parse::parser::Parser; -use core::option::None; - pub fn expand_trace_macros(cx: ext_ctxt, sp: span, - tt: ~[ast::token_tree]) -> base::MacResult { + tt: &[ast::token_tree]) -> base::MacResult { let sess = cx.parse_sess(); let cfg = cx.cfg(); - let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic, - cx.parse_sess().interner, None, tt); + let tt_rdr = new_tt_reader( + copy cx.parse_sess().span_diagnostic, + cx.parse_sess().interner, + None, + vec::from_slice(tt) + ); let rdr = tt_rdr as reader; - let rust_parser = Parser(sess, cfg, rdr.dup()); + let rust_parser = Parser( + sess, + copy cfg, + rdr.dup() + ); - if rust_parser.is_keyword(~"true") { + if rust_parser.is_keyword(&~"true") { cx.set_trace_macros(true); - } else if rust_parser.is_keyword(~"false") { + } else if rust_parser.is_keyword(&~"false") { cx.set_trace_macros(false); } else { cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`") diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 3fc580827e96f..fae9e1651d342 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -140,10 +140,10 @@ pub fn count_names(ms: &[matcher]) -> uint { } #[allow(non_implicitly_copyable_typarams)] -pub fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: BytePos) +pub fn initial_matcher_pos(+ms: ~[matcher], sep: Option, lo: BytePos) -> ~MatcherPos { let mut match_idx_hi = 0u; - for ms.each() |elt| { + for ms.each |elt| { match elt.node { match_tok(_) => (), match_seq(_,_,_,_,hi) => { @@ -154,12 +154,13 @@ pub fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: BytePos) } } } + let matches = vec::from_fn(count_names(ms), |_i| dvec::DVec()); ~MatcherPos { elts: ms, sep: sep, idx: 0u, up: matcher_pos_up(None), - matches: copy vec::from_fn(count_names(ms), |_i| dvec::DVec()), + matches: matches, match_lo: 0u, match_hi: match_idx_hi, sp_lo: lo @@ -225,8 +226,12 @@ pub enum parse_result { error(codemap::span, ~str) } -pub fn parse_or_else(sess: @mut ParseSess, cfg: ast::crate_cfg, rdr: reader, - ms: ~[matcher]) -> HashMap { +pub fn parse_or_else( + sess: @mut ParseSess, + +cfg: ast::crate_cfg, + rdr: reader, + ms: ~[matcher] +) -> HashMap { match parse(sess, cfg, rdr, ms) { success(m) => m, failure(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)), @@ -234,13 +239,14 @@ pub fn parse_or_else(sess: @mut ParseSess, cfg: ast::crate_cfg, rdr: reader, } } -pub fn parse(sess: @mut ParseSess, - cfg: ast::crate_cfg, - rdr: reader, - ms: ~[matcher]) - -> parse_result { +pub fn parse( + sess: @mut ParseSess, + cfg: ast::crate_cfg, + rdr: reader, + ms: ~[matcher] +) -> parse_result { let mut cur_eis = ~[]; - cur_eis.push(initial_matcher_pos(ms, None, rdr.peek().sp.lo)); + cur_eis.push(initial_matcher_pos(copy ms, None, rdr.peek().sp.lo)); loop { let mut bb_eis = ~[]; // black-box parsed by parser.rs @@ -331,8 +337,8 @@ pub fn parse(sess: @mut ParseSess, |_m| DVec::<@named_match>()); let ei_t = ei; cur_eis.push(~MatcherPos { - elts: (*matchers), - sep: (*sep), + elts: copy *matchers, + sep: copy *sep, idx: 0u, up: matcher_pos_up(Some(ei_t)), matches: matches, @@ -380,7 +386,7 @@ pub fn parse(sess: @mut ParseSess, nts, next_eis.len())); } else if (bb_eis.len() == 0u && next_eis.len() == 0u) { return failure(sp, ~"No rules expected the token: " - + to_str(rdr.interner(), tok)); + + to_str(rdr.interner(), 
&tok)); } else if (next_eis.len() > 0u) { /* Now process the next token */ while(next_eis.len() > 0u) { @@ -388,7 +394,7 @@ pub fn parse(sess: @mut ParseSess, } rdr.next_token(); } else /* bb_eis.len() == 1 */ { - let rust_parser = Parser(sess, cfg, rdr.dup()); + let rust_parser = Parser(sess, copy cfg, rdr.dup()); let mut ei = bb_eis.pop(); match ei.elts[ei.idx].node { @@ -426,7 +432,7 @@ pub fn parse_nt(p: Parser, name: ~str) -> nonterminal { ~"ident" => match *p.token { token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) } _ => p.fatal(~"expected ident, found " - + token::to_str(p.reader.interner(), *p.token)) + + token::to_str(p.reader.interner(), &copy *p.token)) }, ~"path" => token::nt_path(p.parse_path_with_tps(false)), ~"tt" => { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index f42bb01f7c21d..5c20abc385d3e 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -54,7 +54,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, // Parse the macro_rules! invocation (`none` is for no interpolations): let arg_reader = new_tt_reader(copy cx.parse_sess().span_diagnostic, - cx.parse_sess().interner, None, arg); + cx.parse_sess().interner, None, copy arg); let argument_map = parse_or_else(cx.parse_sess(), cx.cfg(), arg_reader as reader, argument_gram); @@ -71,7 +71,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, // Given `lhses` and `rhses`, this is the new macro we create fn generic_extension(cx: ext_ctxt, sp: span, name: ident, - arg: ~[ast::token_tree], + arg: &[ast::token_tree], lhses: ~[@named_match], rhses: ~[@named_match]) -> MacResult { @@ -79,7 +79,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, io::println(fmt!("%s!
{ %s }", cx.str_of(name), print::pprust::tt_to_str( - ast::tt_delim(arg), + ast::tt_delim(vec::from_slice(arg)), cx.parse_sess().interner))); } @@ -94,7 +94,12 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, match *lhs { @matched_nonterminal(nt_matchers(ref mtcs)) => { // `none` is because we're not interpolating - let arg_rdr = new_tt_reader(s_d, itr, None, arg) as reader; + let arg_rdr = new_tt_reader( + s_d, + itr, + None, + vec::from_slice(arg) + ) as reader; match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) { success(named_matches) => { let rhs = match rhses[i] { @@ -125,7 +130,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, } failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo { best_fail_spot = sp; - best_fail_msg = (*msg); + best_fail_msg = copy *msg; }, error(sp, ref msg) => cx.span_fatal(sp, (*msg)) } @@ -136,11 +141,11 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, cx.span_fatal(best_fail_spot, best_fail_msg); } - let exp: @fn(ext_ctxt, span, ~[ast::token_tree]) -> MacResult = + let exp: @fn(ext_ctxt, span, &[ast::token_tree]) -> MacResult = |cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses); return MRDef(MacroDef{ - name: *cx.parse_sess().interner.get(name), + name: copy *cx.parse_sess().interner.get(name), ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)}) }); } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index a9502ff29020e..99afd7958e998 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -54,7 +54,7 @@ pub struct TtReader { pub fn new_tt_reader(sp_diag: span_handler, itr: @ident_interner, interp: Option>, - src: ~[ast::token_tree]) + +src: ~[ast::token_tree]) -> @mut TtReader { let r = @mut TtReader { sp_diag: sp_diag, @@ -101,7 +101,7 @@ pub pure fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader { interpolations: r.interpolations, repeat_idx: copy r.repeat_idx, repeat_len: copy r.repeat_len, - cur_tok: r.cur_tok, + cur_tok: copy r.cur_tok, cur_span: r.cur_span } } @@ -115,7 +115,7 @@ pure fn lookup_cur_matched_by_matched(r: @mut TtReader, // end of the line; duplicate henceforth ad } - matched_seq(ads, _) => ads[*idx] + matched_seq(ref ads, _) => ads[*idx] } } vec::foldl(start, r.repeat_idx, red) @@ -131,15 +131,15 @@ enum lis { fn lockstep_iter_size(t: token_tree, r: @mut TtReader) -> lis { fn lis_merge(lhs: lis, rhs: lis, r: @mut TtReader) -> lis { match lhs { - lis_unconstrained => rhs, - lis_contradiction(_) => lhs, + lis_unconstrained => copy rhs, + lis_contradiction(_) => copy lhs, lis_constraint(l_len, l_id) => match rhs { - lis_unconstrained => lhs, - lis_contradiction(_) => rhs, - lis_constraint(r_len, _) if l_len == r_len => lhs, + lis_unconstrained => copy lhs, + lis_contradiction(_) => copy rhs, + lis_constraint(r_len, _) if l_len == r_len => copy lhs, lis_constraint(r_len, r_id) => { - let l_n = *r.interner.get(l_id); - let r_n = *r.interner.get(r_id); + let l_n = copy *r.interner.get(l_id); + let r_n = copy *r.interner.get(r_id); lis_contradiction(fmt!("Inconsistent lockstep iteration: \ '%s' has %u items, but '%s' has %u", l_n, l_len, r_n, r_len)) @@ -155,14 +155,17 @@ fn lockstep_iter_size(t: token_tree, r: @mut TtReader) -> lis { tt_tok(*) => lis_unconstrained, tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) { matched_nonterminal(_) => lis_unconstrained, - matched_seq(ads, _) => lis_constraint(ads.len(), name) + matched_seq(ref ads, _) => lis_constraint(ads.len(), 
name) } } } pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan { - let ret_val = TokenAndSpan { tok: r.cur_tok, sp: r.cur_span }; + let ret_val = TokenAndSpan { + tok: copy r.cur_tok, + sp: r.cur_span, + }; while r.cur.idx >= r.cur.readme.len() { /* done with this set; pop or repeat? */ if ! r.cur.dotdotdoted @@ -199,9 +202,9 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan { loop { /* because it's easiest, this handles `tt_delim` not starting with a `tt_tok`, even though it won't happen */ match r.cur.readme[r.cur.idx] { - tt_delim(tts) => { + tt_delim(copy tts) => { r.cur = @mut TtFrame { - readme: @mut copy tts, + readme: @mut tts, idx: 0u, dotdotdoted: false, sep: None, @@ -210,12 +213,13 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan { // if this could be 0-length, we'd need to potentially recur here } tt_tok(sp, copy tok) => { - r.cur_span = sp; r.cur_tok = tok; + r.cur_span = sp; + r.cur_tok = tok; r.cur.idx += 1u; return ret_val; } tt_seq(sp, copy tts, copy sep, zerok) => { - match lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) { + match lockstep_iter_size(tt_seq(sp, copy tts, sep, zerok), r) { lis_unconstrained => { r.sp_diag.span_fatal( sp, /* blame macro writer */ @@ -264,7 +268,8 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan { return ret_val; } matched_nonterminal(ref other_whole_nt) => { - r.cur_span = sp; r.cur_tok = INTERPOLATED((*other_whole_nt)); + r.cur_span = sp; + r.cur_tok = INTERPOLATED(copy *other_whole_nt); r.cur.idx += 1u; return ret_val; } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index eb9dd0ec03ce5..f820669ab1c6d 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -19,27 +19,27 @@ use core::option; use core::vec; pub trait ast_fold { - fn fold_crate(@self, crate) -> crate; - fn fold_view_item(@self, &&v: @view_item) -> @view_item; - fn fold_foreign_item(@self, &&v: @foreign_item) -> @foreign_item; - fn fold_item(@self, &&v: @item) -> Option<@item>; - fn fold_struct_field(@self, &&v: @struct_field) -> @struct_field; - fn fold_item_underscore(@self, item_) -> item_; - fn fold_method(@self, &&v: @method) -> @method; - fn fold_block(@self, blk) -> blk; - fn fold_stmt(@self, &&v: @stmt) -> @stmt; - fn fold_arm(@self, arm) -> arm; - fn fold_pat(@self, &&v: @pat) -> @pat; - fn fold_decl(@self, &&v: @decl) -> @decl; - fn fold_expr(@self, &&v: @expr) -> @expr; - fn fold_ty(@self, &&v: @Ty) -> @Ty; - fn fold_mod(@self, _mod) -> _mod; - fn fold_foreign_mod(@self, foreign_mod) -> foreign_mod; - fn fold_variant(@self, variant) -> variant; - fn fold_ident(@self, &&v: ident) -> ident; - fn fold_path(@self, &&v: @path) -> @path; - fn fold_local(@self, &&v: @local) -> @local; - fn map_exprs(@self, fn@(&&v: @expr) -> @expr, ~[@expr]) -> ~[@expr]; + fn fold_crate(@self, &crate) -> crate; + fn fold_view_item(@self, @view_item) -> @view_item; + fn fold_foreign_item(@self, @foreign_item) -> @foreign_item; + fn fold_item(@self, @item) -> Option<@item>; + fn fold_struct_field(@self, @struct_field) -> @struct_field; + fn fold_item_underscore(@self, &item_) -> item_; + fn fold_method(@self, @method) -> @method; + fn fold_block(@self, &blk) -> blk; + fn fold_stmt(@self, &stmt) -> @stmt; + fn fold_arm(@self, &arm) -> arm; + fn fold_pat(@self, @pat) -> @pat; + fn fold_decl(@self, @decl) -> @decl; + fn fold_expr(@self, @expr) -> @expr; + fn fold_ty(@self, @Ty) -> @Ty; + fn fold_mod(@self, &_mod) -> _mod; + fn fold_foreign_mod(@self, &foreign_mod) -> foreign_mod; + fn fold_variant(@self, &variant) -> variant; + fn 
fold_ident(@self, ident) -> ident; + fn fold_path(@self, @path) -> @path; + fn fold_local(@self, @local) -> @local; + fn map_exprs(@self, fn@(@expr) -> @expr, &[@expr]) -> ~[@expr]; fn new_id(@self, node_id) -> node_id; fn new_span(@self, span) -> span; } @@ -48,27 +48,27 @@ pub trait ast_fold { pub struct AstFoldFns { //unlike the others, item_ is non-trivial - fold_crate: fn@(crate_, span, ast_fold) -> (crate_, span), + fold_crate: fn@(&crate_, span, ast_fold) -> (crate_, span), fold_view_item: fn@(view_item_, ast_fold) -> view_item_, - fold_foreign_item: fn@(&&v: @foreign_item, ast_fold) -> @foreign_item, - fold_item: fn@(&&v: @item, ast_fold) -> Option<@item>, - fold_struct_field: fn@(&&v: @struct_field, ast_fold) -> @struct_field, - fold_item_underscore: fn@(item_, ast_fold) -> item_, - fold_method: fn@(&&v: @method, ast_fold) -> @method, - fold_block: fn@(blk_, span, ast_fold) -> (blk_, span), - fold_stmt: fn@(stmt_, span, ast_fold) -> (stmt_, span), - fold_arm: fn@(arm, ast_fold) -> arm, - fold_pat: fn@(pat_, span, ast_fold) -> (pat_, span), - fold_decl: fn@(decl_, span, ast_fold) -> (decl_, span), - fold_expr: fn@(expr_, span, ast_fold) -> (expr_, span), - fold_ty: fn@(ty_, span, ast_fold) -> (ty_, span), - fold_mod: fn@(_mod, ast_fold) -> _mod, - fold_foreign_mod: fn@(foreign_mod, ast_fold) -> foreign_mod, - fold_variant: fn@(variant_, span, ast_fold) -> (variant_, span), - fold_ident: fn@(&&v: ident, ast_fold) -> ident, - fold_path: fn@(path, ast_fold) -> path, - fold_local: fn@(local_, span, ast_fold) -> (local_, span), - map_exprs: fn@(fn@(&&v: @expr) -> @expr, ~[@expr]) -> ~[@expr], + fold_foreign_item: fn@(@foreign_item, ast_fold) -> @foreign_item, + fold_item: fn@(@item, ast_fold) -> Option<@item>, + fold_struct_field: fn@(@struct_field, ast_fold) -> @struct_field, + fold_item_underscore: fn@(&item_, ast_fold) -> item_, + fold_method: fn@(@method, ast_fold) -> @method, + fold_block: fn@(&blk_, span, ast_fold) -> (blk_, span), + fold_stmt: fn@(&stmt_, span, ast_fold) -> (stmt_, span), + fold_arm: fn@(&arm, ast_fold) -> arm, + fold_pat: fn@(&pat_, span, ast_fold) -> (pat_, span), + fold_decl: fn@(&decl_, span, ast_fold) -> (decl_, span), + fold_expr: fn@(&expr_, span, ast_fold) -> (expr_, span), + fold_ty: fn@(&ty_, span, ast_fold) -> (ty_, span), + fold_mod: fn@(&_mod, ast_fold) -> _mod, + fold_foreign_mod: fn@(&foreign_mod, ast_fold) -> foreign_mod, + fold_variant: fn@(&variant_, span, ast_fold) -> (variant_, span), + fold_ident: fn@(ident, ast_fold) -> ident, + fold_path: fn@(@path, ast_fold) -> path, + fold_local: fn@(&local_, span, ast_fold) -> (local_, span), + map_exprs: fn@(fn@(@expr) -> @expr, &[@expr]) -> ~[@expr], new_id: fn@(node_id) -> node_id, new_span: fn@(span) -> span } @@ -78,18 +78,20 @@ pub type ast_fold_fns = @AstFoldFns; /* some little folds that probably aren't useful to have in ast_fold itself*/ //used in noop_fold_item and noop_fold_crate and noop_fold_crate_directive -fn fold_meta_item_(&&mi: @meta_item, fld: @ast_fold) -> @meta_item { +fn fold_meta_item_(mi: @meta_item, fld: @ast_fold) -> @meta_item { @spanned { node: match mi.node { - meta_word(ref id) => meta_word((*id)), - meta_list(ref id, ref mis) => { + meta_word(id) => meta_word(id), + meta_list(id, ref mis) => { let fold_meta_item = |x| fold_meta_item_(x, fld); - meta_list(/* FIXME: (#2543) */ copy *id, - mis.map(|e| fold_meta_item(*e))) + meta_list( + id, + mis.map(|e| fold_meta_item(*e)) + ) } - meta_name_value(ref id, s) => { - meta_name_value((*id), /* FIXME (#2543) */ copy s) + 
meta_name_value(id, s) => { + meta_name_value(id, /* FIXME (#2543) */ copy s) } }, span: fld.new_span(mi.span) } @@ -99,7 +101,7 @@ fn fold_attribute_(at: attribute, fld: @ast_fold) -> attribute { spanned { node: ast::attribute_ { style: at.node.style, - value: *fold_meta_item_(@at.node.value, fld), + value: fold_meta_item_(at.node.value, fld), is_sugared_doc: at.node.is_sugared_doc, }, span: fld.new_span(at.span), @@ -117,13 +119,13 @@ fn fold_arg_(a: arg, fld: @ast_fold) -> arg { } //used in noop_fold_expr, and possibly elsewhere in the future fn fold_mac_(m: mac, fld: @ast_fold) -> mac { - spanned { node: match m.node { - mac_invoc_tt(*) => m.node, - }, - span: fld.new_span(m.span) } + spanned { + node: match m.node { mac_invoc_tt(*) => copy m.node }, + span: fld.new_span(m.span), + } } -pub fn fold_fn_decl(decl: ast::fn_decl, fld: @ast_fold) -> ast::fn_decl { +pub fn fold_fn_decl(decl: &ast::fn_decl, fld: @ast_fold) -> ast::fn_decl { ast::fn_decl { inputs: decl.inputs.map(|x| fold_arg_(*x, fld)), output: fld.fold_ty(decl.output), @@ -131,9 +133,8 @@ pub fn fold_fn_decl(decl: ast::fn_decl, fld: @ast_fold) -> ast::fn_decl { } } -fn fold_ty_param_bound(tpb: TyParamBound, - fld: @ast_fold) -> TyParamBound { - match tpb { +fn fold_ty_param_bound(tpb: &TyParamBound, fld: @ast_fold) -> TyParamBound { + match *tpb { TraitTyParamBound(ty) => TraitTyParamBound(fld.fold_ty(ty)), RegionTyParamBound => RegionTyParamBound } @@ -143,7 +144,7 @@ pub fn fold_ty_param(tp: TyParam, fld: @ast_fold) -> TyParam { TyParam {ident: tp.ident, id: fld.new_id(tp.id), - bounds: @tp.bounds.map(|x| fold_ty_param_bound(*x, fld))} + bounds: @tp.bounds.map(|x| fold_ty_param_bound(x, fld))} } pub fn fold_ty_params(tps: &OptVec, @@ -168,12 +169,12 @@ pub fn fold_generics(generics: &Generics, fld: @ast_fold) -> Generics { lifetimes: fold_lifetimes(&generics.lifetimes, fld)} } -pub fn noop_fold_crate(c: crate_, fld: @ast_fold) -> crate_ { +pub fn noop_fold_crate(c: &crate_, fld: @ast_fold) -> crate_ { let fold_meta_item = |x| fold_meta_item_(x, fld); let fold_attribute = |x| fold_attribute_(x, fld); crate_ { - module: fld.fold_mod(c.module), + module: fld.fold_mod(&c.module), attrs: c.attrs.map(|x| fold_attribute(*x)), config: c.config.map(|x| fold_meta_item(*x)), } @@ -184,17 +185,17 @@ fn noop_fold_view_item(vi: view_item_, _fld: @ast_fold) -> view_item_ { } -fn noop_fold_foreign_item(&&ni: @foreign_item, fld: @ast_fold) +fn noop_fold_foreign_item(ni: @foreign_item, fld: @ast_fold) -> @foreign_item { let fold_arg = |x| fold_arg_(x, fld); let fold_attribute = |x| fold_attribute_(x, fld); @ast::foreign_item { ident: fld.fold_ident(ni.ident), - attrs: vec::map(ni.attrs, |x| fold_attribute(*x)), + attrs: ni.attrs.map(|x| fold_attribute(*x)), node: match ni.node { - foreign_item_fn(fdec, purity, ref generics) => { + foreign_item_fn(ref fdec, purity, ref generics) => { foreign_item_fn( ast::fn_decl { inputs: fdec.inputs.map(|a| fold_arg(*a)), @@ -214,18 +215,18 @@ fn noop_fold_foreign_item(&&ni: @foreign_item, fld: @ast_fold) } } -pub fn noop_fold_item(&&i: @item, fld: @ast_fold) -> Option<@item> { +pub fn noop_fold_item(i: @item, fld: @ast_fold) -> Option<@item> { let fold_attribute = |x| fold_attribute_(x, fld); Some(@ast::item { ident: fld.fold_ident(i.ident), attrs: i.attrs.map(|e| fold_attribute(*e)), id: fld.new_id(i.id), - node: fld.fold_item_underscore(i.node), + node: fld.fold_item_underscore(&i.node), vis: i.vis, span: fld.new_span(i.span) }) } -fn noop_fold_struct_field(&&sf: @struct_field, fld: @ast_fold) +fn 
noop_fold_struct_field(sf: @struct_field, fld: @ast_fold) -> @struct_field { @spanned { node: ast::struct_field_ { kind: copy sf.node.kind, id: sf.node.id, @@ -233,53 +234,66 @@ fn noop_fold_struct_field(&&sf: @struct_field, fld: @ast_fold) span: sf.span } } -pub fn noop_fold_item_underscore(i: item_, fld: @ast_fold) -> item_ { - match i { +pub fn noop_fold_item_underscore(i: &item_, fld: @ast_fold) -> item_ { + match *i { item_const(t, e) => item_const(fld.fold_ty(t), fld.fold_expr(e)), - item_fn(ref decl, purity, ref typms, ref body) => { - item_fn(fold_fn_decl(/* FIXME (#2543) */ copy *decl, fld), - purity, - fold_generics(typms, fld), - fld.fold_block(*body)) - } - item_mod(m) => item_mod(fld.fold_mod(m)), - item_foreign_mod(nm) => item_foreign_mod(fld.fold_foreign_mod(nm)), - item_ty(t, ref typms) => item_ty(fld.fold_ty(t), - fold_generics(typms, fld)), - item_enum(ref enum_definition, ref typms) => { - item_enum(ast::enum_def(ast::enum_def_ { - variants: enum_definition.variants.map( - |x| fld.fold_variant(*x)), - common: enum_definition.common.map( - |x| fold_struct_def(*x, fld)), - }), fold_generics(typms, fld)) - } - item_struct(ref struct_def, ref typms) => { - let struct_def = fold_struct_def( - /* FIXME (#2543) */ copy *struct_def, - fld); - item_struct(struct_def, /* FIXME (#2543) */ copy *typms) - } - item_impl(ref tps, ifce, ty, ref methods) => { - item_impl(fold_generics(tps, fld), - ifce.map(|p| fold_trait_ref(*p, fld)), - fld.fold_ty(ty), - methods.map(|x| fld.fold_method(*x))) - } - item_trait(ref tps, ref traits, ref methods) => { + item_fn(ref decl, purity, ref generics, ref body) => { + item_fn( + fold_fn_decl(decl, fld), + purity, + fold_generics(generics, fld), + fld.fold_block(body) + ) + } + item_mod(ref m) => item_mod(fld.fold_mod(m)), + item_foreign_mod(ref nm) => { + item_foreign_mod(fld.fold_foreign_mod(nm)) + } + item_ty(t, ref generics) => { + item_ty(fld.fold_ty(t), fold_generics(generics, fld)) + } + item_enum(ref enum_definition, ref generics) => { + item_enum( + ast::enum_def( + ast::enum_def_ { + variants: do enum_definition.variants.map |x| { + fld.fold_variant(x) + }, + common: do enum_definition.common.map |x| { + fold_struct_def(*x, fld) + } + } + ), + fold_generics(generics, fld)) + } + item_struct(ref struct_def, ref generics) => { + let struct_def = fold_struct_def(*struct_def, fld); + item_struct(struct_def, /* FIXME (#2543) */ copy *generics) + } + item_impl(ref generics, ifce, ty, ref methods) => { + item_impl( + fold_generics(generics, fld), + ifce.map(|p| fold_trait_ref(*p, fld)), + fld.fold_ty(ty), + methods.map(|x| fld.fold_method(*x)) + ) + } + item_trait(ref generics, ref traits, ref methods) => { let methods = do methods.map |method| { match *method { required(*) => copy *method, provided(method) => provided(fld.fold_method(method)) } }; - item_trait(fold_generics(tps, fld), - traits.map(|p| fold_trait_ref(*p, fld)), - methods) + item_trait( + fold_generics(generics, fld), + traits.map(|p| fold_trait_ref(*p, fld)), + methods + ) } item_mac(ref m) => { // FIXME #2888: we might actually want to do something here. 
- item_mac((*m)) + item_mac(copy *m) } } } @@ -287,12 +301,16 @@ pub fn noop_fold_item_underscore(i: item_, fld: @ast_fold) -> item_ { fn fold_struct_def(struct_def: @ast::struct_def, fld: @ast_fold) -> @ast::struct_def { let dtor = do option::map(&struct_def.dtor) |dtor| { - let dtor_body = fld.fold_block(dtor.node.body); + let dtor_body = fld.fold_block(&dtor.node.body); let dtor_id = fld.new_id(dtor.node.id); - spanned { node: ast::struct_dtor_ { body: dtor_body, - id: dtor_id, - .. dtor.node}, - span: dtor.span } + spanned { + node: ast::struct_dtor_ { + body: dtor_body, + id: dtor_id, + .. copy dtor.node + }, + span: copy dtor.span + } }; @ast::struct_def { fields: struct_def.fields.map(|f| fold_struct_field(*f, fld)), @@ -301,29 +319,33 @@ fn fold_struct_def(struct_def: @ast::struct_def, fld: @ast_fold) } } -fn fold_trait_ref(&&p: @trait_ref, fld: @ast_fold) -> @trait_ref { +fn fold_trait_ref(p: @trait_ref, fld: @ast_fold) -> @trait_ref { @ast::trait_ref { path: fld.fold_path(p.path), ref_id: fld.new_id(p.ref_id), } } -fn fold_struct_field(&&f: @struct_field, fld: @ast_fold) -> @struct_field { - @spanned { node: ast::struct_field_ { kind: copy f.node.kind, - id: fld.new_id(f.node.id), - ty: fld.fold_ty(f.node.ty) }, - span: fld.new_span(f.span) } +fn fold_struct_field(f: @struct_field, fld: @ast_fold) -> @struct_field { + @spanned { + node: ast::struct_field_ { + kind: copy f.node.kind, + id: fld.new_id(f.node.id), + ty: fld.fold_ty(f.node.ty), + }, + span: fld.new_span(f.span), + } } -fn noop_fold_method(&&m: @method, fld: @ast_fold) -> @method { +fn noop_fold_method(m: @method, fld: @ast_fold) -> @method { @ast::method { ident: fld.fold_ident(m.ident), attrs: /* FIXME (#2543) */ copy m.attrs, generics: fold_generics(&m.generics, fld), self_ty: m.self_ty, purity: m.purity, - decl: fold_fn_decl(m.decl, fld), - body: fld.fold_block(m.body), + decl: fold_fn_decl(&m.decl, fld), + body: fld.fold_block(&m.body), id: fld.new_id(m.id), span: fld.new_span(m.span), self_id: fld.new_id(m.self_id), @@ -332,7 +354,7 @@ fn noop_fold_method(&&m: @method, fld: @ast_fold) -> @method { } -pub fn noop_fold_block(b: blk_, fld: @ast_fold) -> blk_ { +pub fn noop_fold_block(b: &blk_, fld: @ast_fold) -> blk_ { ast::blk_ { view_items: b.view_items.map(|x| fld.fold_view_item(*x)), stmts: b.stmts.map(|x| fld.fold_stmt(*x)), @@ -342,38 +364,42 @@ pub fn noop_fold_block(b: blk_, fld: @ast_fold) -> blk_ { } } -fn noop_fold_stmt(s: stmt_, fld: @ast_fold) -> stmt_ { +fn noop_fold_stmt(s: &stmt_, fld: @ast_fold) -> stmt_ { let fold_mac = |x| fold_mac_(x, fld); - return match s { - stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)), - stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)), - stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)), - stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi) - }; + match *s { + stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)), + stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)), + stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)), + stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi) + } } -fn noop_fold_arm(a: arm, fld: @ast_fold) -> arm { +fn noop_fold_arm(a: &arm, fld: @ast_fold) -> arm { arm { - pats: vec::map(a.pats, |x| fld.fold_pat(*x)), - guard: option::map(&a.guard, |x| fld.fold_expr(*x)), - body: fld.fold_block(a.body), - } -} - -pub fn noop_fold_pat(p: pat_, fld: @ast_fold) -> pat_ { - return match p { - pat_wild => pat_wild, - pat_ident(binding_mode, pth, sub) => 
{ - pat_ident(binding_mode, - fld.fold_path(pth), - option::map(&sub, |x| fld.fold_pat(*x))) - } - pat_lit(e) => pat_lit(fld.fold_expr(e)), - pat_enum(pth, pats) => { - pat_enum(fld.fold_path(pth), option::map(&pats, - |pats| vec::map(*pats, |x| fld.fold_pat(*x)))) - } - pat_rec(fields, etc) => { + pats: a.pats.map(|x| fld.fold_pat(*x)), + guard: a.guard.map(|x| fld.fold_expr(*x)), + body: fld.fold_block(&a.body), + } +} + +pub fn noop_fold_pat(p: &pat_, fld: @ast_fold) -> pat_ { + match *p { + pat_wild => pat_wild, + pat_ident(binding_mode, pth, ref sub) => { + pat_ident( + binding_mode, + fld.fold_path(pth), + sub.map(|x| fld.fold_pat(*x)) + ) + } + pat_lit(e) => pat_lit(fld.fold_expr(e)), + pat_enum(pth, ref pats) => { + pat_enum( + fld.fold_path(pth), + pats.map(|pats| pats.map(|x| fld.fold_pat(*x))) + ) + } + pat_rec(ref fields, etc) => { let fs = do fields.map |f| { ast::field_pat { ident: /* FIXME (#2543) */ copy f.ident, @@ -381,8 +407,8 @@ pub fn noop_fold_pat(p: pat_, fld: @ast_fold) -> pat_ { } }; pat_rec(fs, etc) - } - pat_struct(pth, fields, etc) => { + } + pat_struct(pth, ref fields, etc) => { let pth_ = fld.fold_path(pth); let fs = do fields.map |f| { ast::field_pat { @@ -391,40 +417,44 @@ pub fn noop_fold_pat(p: pat_, fld: @ast_fold) -> pat_ { } }; pat_struct(pth_, fs, etc) - } - pat_tup(elts) => pat_tup(vec::map(elts, |x| fld.fold_pat(*x))), - pat_box(inner) => pat_box(fld.fold_pat(inner)), - pat_uniq(inner) => pat_uniq(fld.fold_pat(inner)), - pat_region(inner) => pat_region(fld.fold_pat(inner)), - pat_range(e1, e2) => { + } + pat_tup(ref elts) => pat_tup(elts.map(|x| fld.fold_pat(*x))), + pat_box(inner) => pat_box(fld.fold_pat(inner)), + pat_uniq(inner) => pat_uniq(fld.fold_pat(inner)), + pat_region(inner) => pat_region(fld.fold_pat(inner)), + pat_range(e1, e2) => { pat_range(fld.fold_expr(e1), fld.fold_expr(e2)) - }, - pat_vec(elts, tail) => pat_vec( - vec::map(elts, |x| fld.fold_pat(*x)), - option::map(&tail, |tail| fld.fold_pat(*tail)) - ) - }; + }, + pat_vec(ref elts, ref tail) => { + pat_vec( + elts.map(|x| fld.fold_pat(*x)), + tail.map(|tail| fld.fold_pat(*tail)) + ) + } + } } -fn noop_fold_decl(d: decl_, fld: @ast_fold) -> decl_ { - match d { - decl_local(ls) => decl_local(vec::map(ls, |x| fld.fold_local(*x))), - decl_item(it) => match fld.fold_item(it) { - Some(it_folded) => decl_item(it_folded), - None => decl_local(~[]) - } +fn noop_fold_decl(d: &decl_, fld: @ast_fold) -> decl_ { + match *d { + decl_local(ref ls) => decl_local(ls.map(|x| fld.fold_local(*x))), + decl_item(it) => { + match fld.fold_item(it) { + Some(it_folded) => decl_item(it_folded), + None => decl_local(~[]), + } + } } } -pub fn wrap(f: fn@(T, ast_fold) -> T) - -> fn@(T, span, ast_fold) -> (T, span) +pub fn wrap(f: fn@(&T, ast_fold) -> T) + -> fn@(&T, span, ast_fold) -> (T, span) { - return fn@(x: T, s: span, fld: @ast_fold) -> (T, span) { + fn@(x: &T, s: span, fld: @ast_fold) -> (T, span) { (f(x, fld), s) } } -pub fn noop_fold_expr(e: expr_, fld: @ast_fold) -> expr_ { +pub fn noop_fold_expr(e: &expr_, fld: @ast_fold) -> expr_ { fn fold_field_(field: field, fld: @ast_fold) -> field { spanned { node: ast::field_ { @@ -439,157 +469,196 @@ pub fn noop_fold_expr(e: expr_, fld: @ast_fold) -> expr_ { let fold_mac = |x| fold_mac_(x, fld); - return match e { - expr_vstore(e, v) => { + match *e { + expr_vstore(e, v) => { expr_vstore(fld.fold_expr(e), v) - } - expr_vec(exprs, mutt) => { - expr_vec(fld.map_exprs(|x| fld.fold_expr(x), exprs), mutt) - } - expr_repeat(expr, count, mutt) => - 
expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt), - expr_rec(ref fields, maybe_expr) => { - expr_rec(vec::map((*fields), |x| fold_field(*x)), - option::map(&maybe_expr, |x| fld.fold_expr(*x))) - } - expr_tup(elts) => expr_tup(vec::map(elts, |x| fld.fold_expr(*x))), - expr_call(f, args, blk) => { - expr_call(fld.fold_expr(f), - fld.map_exprs(|x| fld.fold_expr(x), args), - blk) - } - expr_method_call(f, i, tps, args, blk) => { - expr_method_call(fld.fold_expr(f), - fld.fold_ident(i), - vec::map(tps, |x| fld.fold_ty(*x)), - fld.map_exprs(|x| fld.fold_expr(x), args), - blk) - } - expr_binary(binop, lhs, rhs) => { + } + expr_vec(ref exprs, mutt) => { + expr_vec(fld.map_exprs(|x| fld.fold_expr(x), *exprs), mutt) + } + expr_repeat(expr, count, mutt) => { + expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt) + } + expr_rec(ref fields, maybe_expr) => { + expr_rec( + fields.map(|x| fold_field(*x)), + maybe_expr.map(|x| fld.fold_expr(*x)) + ) + } + expr_tup(ref elts) => expr_tup(elts.map(|x| fld.fold_expr(*x))), + expr_call(f, ref args, blk) => { + expr_call( + fld.fold_expr(f), + fld.map_exprs(|x| fld.fold_expr(x), *args), + blk + ) + } + expr_method_call(f, i, ref tps, ref args, blk) => { + expr_method_call( + fld.fold_expr(f), + fld.fold_ident(i), + tps.map(|x| fld.fold_ty(*x)), + fld.map_exprs(|x| fld.fold_expr(x), *args), + blk + ) + } + expr_binary(binop, lhs, rhs) => { expr_binary(binop, fld.fold_expr(lhs), fld.fold_expr(rhs)) - } - expr_unary(binop, ohs) => expr_unary(binop, fld.fold_expr(ohs)), - expr_loop_body(f) => expr_loop_body(fld.fold_expr(f)), - expr_do_body(f) => expr_do_body(fld.fold_expr(f)), - expr_lit(_) => copy e, - expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty), - expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)), - expr_if(cond, ref tr, fl) => { - expr_if(fld.fold_expr(cond), fld.fold_block((*tr)), - option::map(&fl, |x| fld.fold_expr(*x))) - } - expr_while(cond, ref body) => { - expr_while(fld.fold_expr(cond), fld.fold_block((*body))) - } - expr_loop(ref body, opt_ident) => { - expr_loop(fld.fold_block((*body)), - option::map(&opt_ident, |x| fld.fold_ident(*x))) - } - expr_match(expr, ref arms) => { - expr_match(fld.fold_expr(expr), - vec::map((*arms), |x| fld.fold_arm(*x))) - } - expr_fn(proto, ref decl, ref body, _) => { - expr_fn(proto, - fold_fn_decl(/* FIXME (#2543) */ copy *decl, fld), - fld.fold_block(*body), - @()) - } - expr_fn_block(ref decl, ref body) => { - expr_fn_block(fold_fn_decl(/* FIXME (#2543) */ copy *decl, fld), - fld.fold_block(*body)) - } - expr_block(ref blk) => expr_block(fld.fold_block((*blk))), - expr_copy(e) => expr_copy(fld.fold_expr(e)), - expr_assign(el, er) => { + } + expr_unary(binop, ohs) => expr_unary(binop, fld.fold_expr(ohs)), + expr_loop_body(f) => expr_loop_body(fld.fold_expr(f)), + expr_do_body(f) => expr_do_body(fld.fold_expr(f)), + expr_lit(_) => copy *e, + expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty), + expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)), + expr_if(cond, ref tr, fl) => { + expr_if( + fld.fold_expr(cond), + fld.fold_block(tr), + fl.map(|x| fld.fold_expr(*x)) + ) + } + expr_while(cond, ref body) => { + expr_while(fld.fold_expr(cond), fld.fold_block(body)) + } + expr_loop(ref body, opt_ident) => { + expr_loop( + fld.fold_block(body), + opt_ident.map(|x| fld.fold_ident(*x)) + ) + } + expr_match(expr, ref arms) => { + expr_match( + fld.fold_expr(expr), + arms.map(|x| fld.fold_arm(x)) + ) + } + expr_fn(proto, ref decl, ref body, _) => { + expr_fn( + proto, + 
fold_fn_decl(decl, fld), + fld.fold_block(body), + @() + ) + } + expr_fn_block(ref decl, ref body) => { + expr_fn_block( + fold_fn_decl(decl, fld), + fld.fold_block(body) + ) + } + expr_block(ref blk) => expr_block(fld.fold_block(blk)), + expr_copy(e) => expr_copy(fld.fold_expr(e)), + expr_assign(el, er) => { expr_assign(fld.fold_expr(el), fld.fold_expr(er)) - } - expr_swap(el, er) => { + } + expr_swap(el, er) => { expr_swap(fld.fold_expr(el), fld.fold_expr(er)) - } - expr_assign_op(op, el, er) => { + } + expr_assign_op(op, el, er) => { expr_assign_op(op, fld.fold_expr(el), fld.fold_expr(er)) - } - expr_field(el, id, tys) => { - expr_field(fld.fold_expr(el), fld.fold_ident(id), - vec::map(tys, |x| fld.fold_ty(*x))) - } - expr_index(el, er) => { + } + expr_field(el, id, ref tys) => { + expr_field( + fld.fold_expr(el), fld.fold_ident(id), + tys.map(|x| fld.fold_ty(*x)) + ) + } + expr_index(el, er) => { expr_index(fld.fold_expr(el), fld.fold_expr(er)) - } - expr_path(pth) => expr_path(fld.fold_path(pth)), - expr_break(opt_ident) => - expr_break(option::map(&opt_ident, |x| fld.fold_ident(*x))), - expr_again(opt_ident) => - expr_again(option::map(&opt_ident, |x| fld.fold_ident(*x))), - expr_ret(e) => expr_ret(option::map(&e, |x| fld.fold_expr(*x))), - expr_log(i, lv, e) => expr_log(i, fld.fold_expr(lv), - fld.fold_expr(e)), - expr_assert(e) => expr_assert(fld.fold_expr(e)), - expr_mac(ref mac) => expr_mac(fold_mac((*mac))), - expr_struct(path, ref fields, maybe_expr) => { - expr_struct(fld.fold_path(path), - vec::map((*fields), |x| fold_field(*x)), - option::map(&maybe_expr, |x| fld.fold_expr(*x))) - }, - expr_paren(ex) => expr_paren(fld.fold_expr(ex)) - } -} - -pub fn noop_fold_ty(t: ty_, fld: @ast_fold) -> ty_ { + } + expr_path(pth) => expr_path(fld.fold_path(pth)), + expr_break(ref opt_ident) => { + expr_break(opt_ident.map(|x| fld.fold_ident(*x))) + } + expr_again(ref opt_ident) => { + expr_again(opt_ident.map(|x| fld.fold_ident(*x))) + } + expr_ret(ref e) => { + expr_ret(e.map(|x| fld.fold_expr(*x))) + } + expr_log(i, lv, e) => { + expr_log( + i, + fld.fold_expr(lv), + fld.fold_expr(e) + ) + } + expr_assert(e) => expr_assert(fld.fold_expr(e)), + expr_mac(ref mac) => expr_mac(fold_mac((*mac))), + expr_struct(path, ref fields, maybe_expr) => { + expr_struct( + fld.fold_path(path), + fields.map(|x| fold_field(*x)), + maybe_expr.map(|x| fld.fold_expr(*x)) + ) + }, + expr_paren(ex) => expr_paren(fld.fold_expr(ex)) + } +} + +pub fn noop_fold_ty(t: &ty_, fld: @ast_fold) -> ty_ { let fold_mac = |x| fold_mac_(x, fld); - fn fold_mt(mt: mt, fld: @ast_fold) -> mt { - mt { ty: fld.fold_ty(mt.ty), mutbl: mt.mutbl } + fn fold_mt(mt: &mt, fld: @ast_fold) -> mt { + mt { + ty: fld.fold_ty(mt.ty), + mutbl: mt.mutbl, + } } fn fold_field(f: ty_field, fld: @ast_fold) -> ty_field { spanned { node: ast::ty_field_ { ident: fld.fold_ident(f.node.ident), - mt: fold_mt(f.node.mt, fld), + mt: fold_mt(&f.node.mt, fld), }, span: fld.new_span(f.span), } } - match t { - ty_nil | ty_bot | ty_infer => copy t, - ty_box(mt) => ty_box(fold_mt(mt, fld)), - ty_uniq(mt) => ty_uniq(fold_mt(mt, fld)), - ty_vec(mt) => ty_vec(fold_mt(mt, fld)), - ty_ptr(mt) => ty_ptr(fold_mt(mt, fld)), - ty_rptr(region, mt) => ty_rptr(region, fold_mt(mt, fld)), - ty_rec(ref fields) => - ty_rec(vec::map((*fields), |f| fold_field(*f, fld))), - ty_closure(f) => - ty_closure(@TyClosure { - sigil: f.sigil, - purity: f.purity, - region: f.region, - onceness: f.onceness, - decl: fold_fn_decl(f.decl, fld) - }), - ty_bare_fn(f) => - ty_bare_fn(@TyBareFn { - 
purity: f.purity, - abi: f.abi, - decl: fold_fn_decl(f.decl, fld) - }), - ty_tup(tys) => ty_tup(vec::map(tys, |ty| fld.fold_ty(*ty))), - ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)), - ty_fixed_length_vec(mt, vs) => - ty_fixed_length_vec(fold_mt(mt, fld), vs), - ty_mac(ref mac) => ty_mac(fold_mac((*mac))) + match *t { + ty_nil | ty_bot | ty_infer => copy *t, + ty_box(ref mt) => ty_box(fold_mt(mt, fld)), + ty_uniq(ref mt) => ty_uniq(fold_mt(mt, fld)), + ty_vec(ref mt) => ty_vec(fold_mt(mt, fld)), + ty_ptr(ref mt) => ty_ptr(fold_mt(mt, fld)), + ty_rptr(region, ref mt) => ty_rptr(region, fold_mt(mt, fld)), + ty_rec(ref fields) => ty_rec(fields.map(|f| fold_field(*f, fld))), + ty_closure(ref f) => { + ty_closure(@TyClosure { + sigil: f.sigil, + purity: f.purity, + region: f.region, + onceness: f.onceness, + decl: fold_fn_decl(&f.decl, fld) + }) + } + ty_bare_fn(ref f) => { + ty_bare_fn(@TyBareFn { + purity: f.purity, + abi: f.abi, + decl: fold_fn_decl(&f.decl, fld) + }) + } + ty_tup(ref tys) => ty_tup(tys.map(|ty| fld.fold_ty(*ty))), + ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)), + ty_fixed_length_vec(ref mt, vs) => { + ty_fixed_length_vec( + fold_mt(mt, fld), + vs + ) + } + ty_mac(ref mac) => ty_mac(fold_mac(*mac)) } } // ...nor do modules -pub fn noop_fold_mod(m: _mod, fld: @ast_fold) -> _mod { +pub fn noop_fold_mod(m: &_mod, fld: @ast_fold) -> _mod { ast::_mod { view_items: vec::map(m.view_items, |x| fld.fold_view_item(*x)), items: vec::filter_mapped(m.items, |x| fld.fold_item(*x)), } } -fn noop_fold_foreign_mod(nm: foreign_mod, fld: @ast_fold) -> foreign_mod { +fn noop_fold_foreign_mod(nm: &foreign_mod, fld: @ast_fold) -> foreign_mod { ast::foreign_mod { sort: nm.sort, abi: nm.abi, @@ -598,7 +667,7 @@ fn noop_fold_foreign_mod(nm: foreign_mod, fld: @ast_fold) -> foreign_mod { } } -fn noop_fold_variant(v: variant_, fld: @ast_fold) -> variant_ { +fn noop_fold_variant(v: &variant_, fld: @ast_fold) -> variant_ { fn fold_variant_arg_(va: variant_arg, fld: @ast_fold) -> variant_arg { ast::variant_arg { ty: fld.fold_ty(va.ty), id: fld.new_id(va.id) } } @@ -606,17 +675,23 @@ fn noop_fold_variant(v: variant_, fld: @ast_fold) -> variant_ { let kind; match v.kind { - tuple_variant_kind(variant_args) => - kind = tuple_variant_kind(vec::map(variant_args, - |x| fold_variant_arg(*x))), + tuple_variant_kind(ref variant_args) => { + kind = tuple_variant_kind(do variant_args.map |x| { + fold_variant_arg(*x) + }) + } struct_variant_kind(struct_def) => { let dtor = do option::map(&struct_def.dtor) |dtor| { - let dtor_body = fld.fold_block(dtor.node.body); + let dtor_body = fld.fold_block(&dtor.node.body); let dtor_id = fld.new_id(dtor.node.id); - spanned { node: ast::struct_dtor_ { body: dtor_body, - id: dtor_id, - .. dtor.node}, - .. *dtor } + spanned { + node: ast::struct_dtor_ { + body: dtor_body, + id: dtor_id, + .. copy dtor.node + }, + .. 
copy *dtor + } }; kind = struct_variant_kind(@ast::struct_def { fields: vec::map(struct_def.fields, @@ -625,12 +700,13 @@ fn noop_fold_variant(v: variant_, fld: @ast_fold) -> variant_ { ctor_id: option::map(&struct_def.ctor_id, |c| fld.new_id(*c)) }) } - enum_variant_kind(ref enum_definition) => { - let variants = vec::map((*enum_definition).variants, - |x| fld.fold_variant(*x)); - let common = option::map(&(*enum_definition).common, - |x| fold_struct_def(*x, fld)); + let variants = do (*enum_definition).variants.map |x| { + fld.fold_variant(x) + }; + let common = do (*enum_definition).common.map |x| { + fold_struct_def(*x, fld) + }; kind = enum_variant_kind( ast::enum_def(ast::enum_def_ { variants: variants, @@ -641,7 +717,7 @@ fn noop_fold_variant(v: variant_, fld: @ast_fold) -> variant_ { } let fold_attribute = |x| fold_attribute_(x, fld); - let attrs = vec::map(v.attrs, |x| fold_attribute(*x)); + let attrs = v.attrs.map(|x| fold_attribute(*x)); let de = match v.disr_expr { Some(e) => Some(fld.fold_expr(e)), @@ -657,19 +733,21 @@ fn noop_fold_variant(v: variant_, fld: @ast_fold) -> variant_ { } } -fn noop_fold_ident(&&i: ident, _fld: @ast_fold) -> ident { - return /* FIXME (#2543) */ copy i; +fn noop_fold_ident(i: ident, _fld: @ast_fold) -> ident { + /* FIXME (#2543) */ copy i } -fn noop_fold_path(&&p: path, fld: @ast_fold) -> path { - ast::path { span: fld.new_span(p.span), - global: p.global, - idents: p.idents.map(|x| fld.fold_ident(*x)), - rp: p.rp, - types: p.types.map(|x| fld.fold_ty(*x)) } +fn noop_fold_path(p: @path, fld: @ast_fold) -> path { + ast::path { + span: fld.new_span(p.span), + global: p.global, + idents: p.idents.map(|x| fld.fold_ident(*x)), + rp: p.rp, + types: p.types.map(|x| fld.fold_ty(*x)), + } } -fn noop_fold_local(l: local_, fld: @ast_fold) -> local_ { +fn noop_fold_local(l: &local_, fld: @ast_fold) -> local_ { local_ { is_mutbl: l.is_mutbl, ty: fld.fold_ty(l.ty), @@ -681,8 +759,8 @@ fn noop_fold_local(l: local_, fld: @ast_fold) -> local_ { /* temporarily eta-expand because of a compiler bug with using `fn` as a value */ -fn noop_map_exprs(f: fn@(&&v: @expr) -> @expr, es: ~[@expr]) -> ~[@expr] { - return vec::map(es, |x| f(*x)); +fn noop_map_exprs(f: fn@(@expr) -> @expr, es: &[@expr]) -> ~[@expr] { + es.map(|x| f(*x)) } fn noop_id(i: node_id) -> node_id { return i; } @@ -690,38 +768,40 @@ fn noop_id(i: node_id) -> node_id { return i; } fn noop_span(sp: span) -> span { return sp; } pub fn default_ast_fold() -> ast_fold_fns { - return @AstFoldFns {fold_crate: wrap(noop_fold_crate), - fold_view_item: noop_fold_view_item, - fold_foreign_item: noop_fold_foreign_item, - fold_item: noop_fold_item, - fold_struct_field: noop_fold_struct_field, - fold_item_underscore: noop_fold_item_underscore, - fold_method: noop_fold_method, - fold_block: wrap(noop_fold_block), - fold_stmt: wrap(noop_fold_stmt), - fold_arm: noop_fold_arm, - fold_pat: wrap(noop_fold_pat), - fold_decl: wrap(noop_fold_decl), - fold_expr: wrap(noop_fold_expr), - fold_ty: wrap(noop_fold_ty), - fold_mod: noop_fold_mod, - fold_foreign_mod: noop_fold_foreign_mod, - fold_variant: wrap(noop_fold_variant), - fold_ident: noop_fold_ident, - fold_path: noop_fold_path, - fold_local: wrap(noop_fold_local), - map_exprs: noop_map_exprs, - new_id: noop_id, - new_span: noop_span}; + @AstFoldFns { + fold_crate: wrap(noop_fold_crate), + fold_view_item: noop_fold_view_item, + fold_foreign_item: noop_fold_foreign_item, + fold_item: noop_fold_item, + fold_struct_field: noop_fold_struct_field, + fold_item_underscore: 
noop_fold_item_underscore, + fold_method: noop_fold_method, + fold_block: wrap(noop_fold_block), + fold_stmt: wrap(noop_fold_stmt), + fold_arm: noop_fold_arm, + fold_pat: wrap(noop_fold_pat), + fold_decl: wrap(noop_fold_decl), + fold_expr: wrap(noop_fold_expr), + fold_ty: wrap(noop_fold_ty), + fold_mod: noop_fold_mod, + fold_foreign_mod: noop_fold_foreign_mod, + fold_variant: wrap(noop_fold_variant), + fold_ident: noop_fold_ident, + fold_path: noop_fold_path, + fold_local: wrap(noop_fold_local), + map_exprs: noop_map_exprs, + new_id: noop_id, + new_span: noop_span, + } } impl ast_fold for AstFoldFns { /* naturally, a macro to write these would be nice */ - fn fold_crate(@self, c: crate) -> crate { - let (n, s) = (self.fold_crate)(c.node, c.span, self as @ast_fold); + fn fold_crate(@self, c: &crate) -> crate { + let (n, s) = (self.fold_crate)(&c.node, c.span, self as @ast_fold); spanned { node: n, span: (self.new_span)(s) } } - fn fold_view_item(@self, &&x: @view_item) -> + fn fold_view_item(@self, x: @view_item) -> @view_item { @ast::view_item { node: (self.fold_view_item)(x.node, self as @ast_fold), @@ -731,14 +811,13 @@ impl ast_fold for AstFoldFns { span: (self.new_span)(x.span), } } - fn fold_foreign_item(@self, &&x: @foreign_item) - -> @foreign_item { - return (self.fold_foreign_item)(x, self as @ast_fold); + fn fold_foreign_item(@self, x: @foreign_item) -> @foreign_item { + (self.fold_foreign_item)(x, self as @ast_fold) } - fn fold_item(@self, &&i: @item) -> Option<@item> { - return (self.fold_item)(i, self as @ast_fold); + fn fold_item(@self, i: @item) -> Option<@item> { + (self.fold_item)(i, self as @ast_fold) } - fn fold_struct_field(@self, &&sf: @struct_field) -> @struct_field { + fn fold_struct_field(@self, sf: @struct_field) -> @struct_field { @spanned { node: ast::struct_field_ { kind: copy sf.node.kind, @@ -748,39 +827,37 @@ impl ast_fold for AstFoldFns { span: (self.new_span)(sf.span), } } - fn fold_item_underscore(@self, i: item_) -> - item_ { - return (self.fold_item_underscore)(i, self as @ast_fold); + fn fold_item_underscore(@self, i: &item_) -> item_ { + (self.fold_item_underscore)(i, self as @ast_fold) } - fn fold_method(@self, &&x: @method) - -> @method { - return (self.fold_method)(x, self as @ast_fold); + fn fold_method(@self, x: @method) -> @method { + (self.fold_method)(x, self as @ast_fold) } - fn fold_block(@self, x: blk) -> blk { - let (n, s) = (self.fold_block)(x.node, x.span, self as @ast_fold); + fn fold_block(@self, x: &blk) -> blk { + let (n, s) = (self.fold_block)(&x.node, x.span, self as @ast_fold); spanned { node: n, span: (self.new_span)(s) } } - fn fold_stmt(@self, &&x: @stmt) -> @stmt { - let (n, s) = (self.fold_stmt)(x.node, x.span, self as @ast_fold); + fn fold_stmt(@self, x: &stmt) -> @stmt { + let (n, s) = (self.fold_stmt)(&x.node, x.span, self as @ast_fold); @spanned { node: n, span: (self.new_span)(s) } } - fn fold_arm(@self, x: arm) -> arm { - return (self.fold_arm)(x, self as @ast_fold); + fn fold_arm(@self, x: &arm) -> arm { + (self.fold_arm)(x, self as @ast_fold) } - fn fold_pat(@self, &&x: @pat) -> @pat { - let (n, s) = (self.fold_pat)(x.node, x.span, self as @ast_fold); + fn fold_pat(@self, x: @pat) -> @pat { + let (n, s) = (self.fold_pat)(&x.node, x.span, self as @ast_fold); @pat { id: (self.new_id)(x.id), node: n, span: (self.new_span)(s), } } - fn fold_decl(@self, &&x: @decl) -> @decl { - let (n, s) = (self.fold_decl)(x.node, x.span, self as @ast_fold); + fn fold_decl(@self, x: @decl) -> @decl { + let (n, s) = (self.fold_decl)(&x.node, 
x.span, self as @ast_fold); @spanned { node: n, span: (self.new_span)(s) } } - fn fold_expr(@self, &&x: @expr) -> @expr { - let (n, s) = (self.fold_expr)(x.node, x.span, self as @ast_fold); + fn fold_expr(@self, x: @expr) -> @expr { + let (n, s) = (self.fold_expr)(&x.node, x.span, self as @ast_fold); @expr { id: (self.new_id)(x.id), callee_id: (self.new_id)(x.callee_id), @@ -788,39 +865,39 @@ impl ast_fold for AstFoldFns { span: (self.new_span)(s), } } - fn fold_ty(@self, &&x: @Ty) -> @Ty { - let (n, s) = (self.fold_ty)(x.node, x.span, self as @ast_fold); + fn fold_ty(@self, x: @Ty) -> @Ty { + let (n, s) = (self.fold_ty)(&x.node, x.span, self as @ast_fold); @Ty { id: (self.new_id)(x.id), node: n, span: (self.new_span)(s), } } - fn fold_mod(@self, x: _mod) -> _mod { - return (self.fold_mod)(x, self as @ast_fold); + fn fold_mod(@self, x: &_mod) -> _mod { + (self.fold_mod)(x, self as @ast_fold) } - fn fold_foreign_mod(@self, x: foreign_mod) -> - foreign_mod { - return (self.fold_foreign_mod)(x, self as @ast_fold); + fn fold_foreign_mod(@self, x: &foreign_mod) -> foreign_mod { + (self.fold_foreign_mod)(x, self as @ast_fold) } - fn fold_variant(@self, x: variant) -> - variant { - let (n, s) = (self.fold_variant)(x.node, x.span, self as @ast_fold); + fn fold_variant(@self, x: &variant) -> variant { + let (n, s) = (self.fold_variant)(&x.node, x.span, self as @ast_fold); spanned { node: n, span: (self.new_span)(s) } } - fn fold_ident(@self, &&x: ident) -> ident { - return (self.fold_ident)(x, self as @ast_fold); + fn fold_ident(@self, x: ident) -> ident { + (self.fold_ident)(x, self as @ast_fold) } - fn fold_path(@self, &&x: @path) -> @path { - @(self.fold_path)(*x, self as @ast_fold) + fn fold_path(@self, x: @path) -> @path { + @(self.fold_path)(x, self as @ast_fold) } - fn fold_local(@self, &&x: @local) -> @local { - let (n, s) = (self.fold_local)(x.node, x.span, self as @ast_fold); + fn fold_local(@self, x: @local) -> @local { + let (n, s) = (self.fold_local)(&x.node, x.span, self as @ast_fold); @spanned { node: n, span: (self.new_span)(s) } } - fn map_exprs(@self, - f: fn@(&&v: @expr) -> @expr, - e: ~[@expr]) -> ~[@expr] { + fn map_exprs( + @self, + f: fn@(@expr) -> @expr, + e: &[@expr] + ) -> ~[@expr] { (self.map_exprs)(f, e) } fn new_id(@self, node_id: ast::node_id) -> node_id { diff --git a/src/libsyntax/opt_vec.rs b/src/libsyntax/opt_vec.rs index 340e2614d2edc..16db384bb062c 100644 --- a/src/libsyntax/opt_vec.rs +++ b/src/libsyntax/opt_vec.rs @@ -92,7 +92,7 @@ impl OptVec { let mut v0 = ~[t]; match *self { Empty => {} - Vec(v1) => { v0.push_all(v1); } + Vec(ref v1) => { v0.push_all(*v1); } } return Vec(v0); } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index b5c4ff3ddd739..28f84613740fb 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -47,7 +47,10 @@ impl parser_attr for Parser { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - *self.id_to_str(s), self.span.lo, self.span.hi); + copy *self.id_to_str(s), + self.span.lo, + self.span.hi + ); if attr.node.style != ast::attr_outer { self.fatal(~"expected outer comment"); } @@ -62,18 +65,18 @@ impl parser_attr for Parser { fn parse_attribute(style: ast::attr_style) -> ast::attribute { let lo = self.span.lo; - self.expect(token::POUND); + self.expect(&token::POUND); return self.parse_attribute_naked(style, lo); } fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) -> ast::attribute { - self.expect(token::LBRACKET); + self.expect(&token::LBRACKET); let meta_item 
= self.parse_meta_item(); - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); let mut hi = self.span.hi; return spanned(lo, hi, ast::attribute_ { style: style, - value: *meta_item, + value: meta_item, is_sugared_doc: false }); } @@ -114,7 +117,10 @@ impl parser_attr for Parser { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - *self.id_to_str(s), self.span.lo, self.span.hi); + copy *self.id_to_str(s), + self.span.lo, + self.span.hi + ); self.bump(); if attr.node.style == ast::attr_inner { inner_attrs += ~[attr]; @@ -152,15 +158,18 @@ impl parser_attr for Parser { } fn parse_meta_seq() -> ~[@ast::meta_item] { - return self.parse_seq(token::LPAREN, token::RPAREN, - seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_meta_item()).node; + copy self.parse_seq( + &token::LPAREN, + &token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + |p| p.parse_meta_item() + ).node } fn parse_optional_meta() -> ~[@ast::meta_item] { match *self.token { - token::LPAREN => return self.parse_meta_seq(), - _ => return ~[] + token::LPAREN => self.parse_meta_seq(), + _ => ~[] } } } diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 152bd9b0ce417..714ae9a0fd520 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -221,7 +221,7 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str], if col < len { s1 = str::slice(s, col, len); } else { s1 = ~""; } - } else { s1 = s; } + } else { s1 = /*bad*/ copy s; } log(debug, ~"pushing line: " + s1); lines.push(s1); } @@ -321,7 +321,7 @@ pub struct lit { } pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, - path: ~str, + +path: ~str, srdr: io::Reader) -> (~[cmnt], ~[lit]) { let src = @str::from_bytes(srdr.read_whole_stream()); let itr = parse::token::mk_fake_ident_interner(); @@ -354,12 +354,12 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, rdr.next_token(); //discard, and look ahead; we're working with internal state let TokenAndSpan {tok: tok, sp: sp} = rdr.peek(); - if token::is_lit(tok) { + if token::is_lit(&tok) { let s = get_str_from(rdr, bstart); - literals.push(lit {lit: s, pos: sp.lo}); - log(debug, ~"tok lit: " + s); + literals.push(lit {lit: /*bad*/ copy s, pos: sp.lo}); + debug!("tok lit: %s", s); } else { - log(debug, ~"tok: " + token::to_str(rdr.interner, tok)); + debug!("tok: %s", token::to_str(rdr.interner, &tok)); } first_read = false; } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index a92eb2db42aa2..f538ed8030c13 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -30,52 +30,62 @@ pub struct SeqSep { trailing_sep_allowed: bool } -pub fn seq_sep_trailing_disallowed(t: token::Token) -> SeqSep { +pub fn seq_sep_trailing_disallowed(+t: token::Token) -> SeqSep { SeqSep { - sep: option::Some(t), - trailing_sep_allowed: false + sep: Some(t), + trailing_sep_allowed: false, } } -pub fn seq_sep_trailing_allowed(t: token::Token) -> SeqSep { +pub fn seq_sep_trailing_allowed(+t: token::Token) -> SeqSep { SeqSep { - sep: option::Some(t), - trailing_sep_allowed: true + sep: Some(t), + trailing_sep_allowed: true, } } pub fn seq_sep_none() -> SeqSep { SeqSep { - sep: option::None, - trailing_sep_allowed: false + sep: None, + trailing_sep_allowed: false, } } -pub fn token_to_str(reader: reader, ++token: token::Token) -> ~str { +pub fn token_to_str(reader: reader, token: &token::Token) -> ~str { token::to_str(reader.interner(), token) } 
pub impl Parser { - fn unexpected_last(t: token::Token) -> ! { + fn unexpected_last(t: &token::Token) -> ! { self.span_fatal( *self.last_span, - ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`"); + fmt!( + "unexpected token: `%s`", + token_to_str(self.reader, t) + ) + ); } fn unexpected() -> ! { - self.fatal(~"unexpected token: `" - + token_to_str(self.reader, *self.token) + ~"`"); + self.fatal( + fmt!( + "unexpected token: `%s`", + token_to_str(self.reader, &copy *self.token) + ) + ); } // expect and consume the token t. Signal an error if // the next token is not t. - fn expect(t: token::Token) { - if *self.token == t { + fn expect(t: &token::Token) { + if *self.token == *t { self.bump(); } else { - let mut s: ~str = ~"expected `"; - s += token_to_str(self.reader, t); - s += ~"` but found `"; - s += token_to_str(self.reader, *self.token); - self.fatal(s + ~"`"); + self.fatal( + fmt!( + "expected `%s` but found `%s`", + token_to_str(self.reader, t), + token_to_str(self.reader, &copy *self.token) + ) + ) } } @@ -83,12 +93,23 @@ pub impl Parser { self.check_strict_keywords(); self.check_reserved_keywords(); match *self.token { - token::IDENT(i, _) => { self.bump(); return i; } - token::INTERPOLATED(token::nt_ident(*)) => { self.bug( - ~"ident interpolation not converted to real token"); } - _ => { self.fatal(~"expected ident, found `" - + token_to_str(self.reader, *self.token) - + ~"`"); } + token::IDENT(i, _) => { + self.bump(); + i + } + token::INTERPOLATED(token::nt_ident(*)) => { + self.bug( + ~"ident interpolation not converted to real token" + ); + } + _ => { + self.fatal( + fmt!( + "expected ident, found `%s`", + token_to_str(self.reader, &copy *self.token) + ) + ); + } } } @@ -106,37 +127,37 @@ pub impl Parser { // consume token 'tok' if it exists. Returns true if the given // token was present, false otherwise. - fn eat(tok: token::Token) -> bool { - return if *self.token == tok { self.bump(); true } else { false }; + fn eat(tok: &token::Token) -> bool { + return if *self.token == *tok { self.bump(); true } else { false }; } // Storing keywords as interned idents instead of strings would be nifty. 
// A sanity check that the word we are asking for is a known keyword - fn require_keyword(word: ~str) { - if !self.keywords.contains_key(&word) { - self.bug(fmt!("unknown keyword: %s", word)); + fn require_keyword(word: &~str) { + if !self.keywords.contains_key(word) { + self.bug(fmt!("unknown keyword: %s", *word)); } } - fn token_is_word(word: ~str, ++tok: token::Token) -> bool { - match tok { - token::IDENT(sid, false) => { *self.id_to_str(sid) == word } - _ => { false } + pure fn token_is_word(word: &~str, tok: &token::Token) -> bool { + match *tok { + token::IDENT(sid, false) => { *self.id_to_str(sid) == *word } + _ => { false } } } - fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool { + fn token_is_keyword(word: &~str, tok: &token::Token) -> bool { self.require_keyword(word); self.token_is_word(word, tok) } - fn is_keyword(word: ~str) -> bool { - self.token_is_keyword(word, *self.token) + fn is_keyword(word: &~str) -> bool { + self.token_is_keyword(word, &copy *self.token) } - fn is_any_keyword(tok: token::Token) -> bool { - match tok { + fn is_any_keyword(tok: &token::Token) -> bool { + match *tok { token::IDENT(sid, false) => { self.keywords.contains_key(self.id_to_str(sid)) } @@ -144,62 +165,66 @@ pub impl Parser { } } - fn eat_keyword(word: ~str) -> bool { + fn eat_keyword(word: &~str) -> bool { self.require_keyword(word); let is_kw = match *self.token { - token::IDENT(sid, false) => (word == *self.id_to_str(sid)), - _ => false + token::IDENT(sid, false) => *word == *self.id_to_str(sid), + _ => false }; if is_kw { self.bump() } is_kw } - fn expect_keyword(word: ~str) { + fn expect_keyword(word: &~str) { self.require_keyword(word); if !self.eat_keyword(word) { - self.fatal(~"expected `" + word + ~"`, found `" + - token_to_str(self.reader, *self.token) + - ~"`"); + self.fatal( + fmt!( + "expected `%s`, found `%s`", + *word, + token_to_str(self.reader, &copy *self.token) + ) + ); } } - fn is_strict_keyword(word: ~str) -> bool { - self.strict_keywords.contains_key(&word) + fn is_strict_keyword(word: &~str) -> bool { + self.strict_keywords.contains_key(word) } fn check_strict_keywords() { match *self.token { - token::IDENT(_, false) => { - let w = token_to_str(self.reader, *self.token); - self.check_strict_keywords_(w); - } - _ => () + token::IDENT(_, false) => { + let w = token_to_str(self.reader, &copy *self.token); + self.check_strict_keywords_(&w); + } + _ => () } } - fn check_strict_keywords_(w: ~str) { + fn check_strict_keywords_(w: &~str) { if self.is_strict_keyword(w) { - self.fatal(~"found `" + w + ~"` in ident position"); + self.fatal(fmt!("found `%s` in ident position", *w)); } } - fn is_reserved_keyword(word: ~str) -> bool { - self.reserved_keywords.contains_key(&word) + fn is_reserved_keyword(word: &~str) -> bool { + self.reserved_keywords.contains_key(word) } fn check_reserved_keywords() { match *self.token { - token::IDENT(_, false) => { - let w = token_to_str(self.reader, *self.token); - self.check_reserved_keywords_(w); - } - _ => () + token::IDENT(_, false) => { + let w = token_to_str(self.reader, &copy *self.token); + self.check_reserved_keywords_(&w); + } + _ => () } } - fn check_reserved_keywords_(w: ~str) { + fn check_reserved_keywords_(w: &~str) { if self.is_reserved_keyword(w) { - self.fatal(~"`" + w + ~"` is a reserved keyword"); + self.fatal(fmt!("`%s` is a reserved keyword", *w)); } } @@ -209,14 +234,16 @@ pub impl Parser { if *self.token == token::GT { self.bump(); } else if *self.token == token::BINOP(token::SHR) { - self.replace_token(token::GT, - self.span.lo 
+ BytePos(1u), - self.span.hi); + self.replace_token( + token::GT, + self.span.lo + BytePos(1u), + self.span.hi + ); } else { let mut s: ~str = ~"expected `"; - s += token_to_str(self.reader, token::GT); + s += token_to_str(self.reader, &token::GT); s += ~"`, found `"; - s += token_to_str(self.reader, *self.token); + s += token_to_str(self.reader, &copy *self.token); s += ~"`"; self.fatal(s); } @@ -224,8 +251,10 @@ // parse a sequence bracketed by '<' and '>', stopping // before the '>'. - fn parse_seq_to_before_gt(sep: Option, - f: fn(Parser) -> T) -> OptVec { + fn parse_seq_to_before_gt( + sep: Option, + f: fn(&Parser) -> T + ) -> OptVec { let mut first = true; let mut v = opt_vec::Empty; while *self.token != token::GT @@ -233,17 +262,19 @@ match sep { Some(ref t) => { if first { first = false; } - else { self.expect(*t); } + else { self.expect(t); } } _ => () } - v.push(f(self)); + v.push(f(&self)); } return v; } - fn parse_seq_to_gt(sep: Option, - f: fn(Parser) -> T) -> OptVec { + fn parse_seq_to_gt( + sep: Option, + f: fn(&Parser) -> T + ) -> OptVec { let v = self.parse_seq_to_before_gt(sep, f); self.expect_gt(); return v; @@ -252,30 +283,36 @@ // parse a sequence, including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. - fn parse_seq_to_end(ket: token::Token, sep: SeqSep, - f: fn(Parser) -> T) -> ~[T] { + fn parse_seq_to_end( + ket: &token::Token, + sep: SeqSep, + f: fn(&Parser) -> T + ) -> ~[T] { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); - return val; + val } // parse a sequence, not including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. - fn parse_seq_to_before_end(ket: token::Token, sep: SeqSep, - f: fn(Parser) -> T) -> ~[T] { + fn parse_seq_to_before_end( + ket: &token::Token, + sep: SeqSep, + f: fn(&Parser) -> T + ) -> ~[T] { let mut first: bool = true; let mut v: ~[T] = ~[]; - while *self.token != ket { + while *self.token != *ket { match sep.sep { Some(ref t) => { if first { first = false; } - else { self.expect(*t); } + else { self.expect(t); } } _ => () } - if sep.trailing_sep_allowed && *self.token == ket { break; } - v.push(f(self)); + if sep.trailing_sep_allowed && *self.token == *ket { break; } + v.push(f(&self)); } return v; } @@ -283,25 +320,31 @@ // parse a sequence, including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. - fn parse_unspanned_seq(+bra: token::Token, - +ket: token::Token, - sep: SeqSep, - f: fn(Parser) -> T) -> ~[T] { + fn parse_unspanned_seq( + bra: &token::Token, + ket: &token::Token, + sep: SeqSep, + f: fn(&Parser) -> T + ) -> ~[T] { self.expect(bra); - let result = self.parse_seq_to_before_end::(ket, sep, f); + let result = self.parse_seq_to_before_end(ket, sep, f); self.bump(); - return result; + result } // NB: Do not use this function unless you actually plan to place the // spanned list in the AST. 
- fn parse_seq(bra: token::Token, ket: token::Token, sep: SeqSep, - f: fn(Parser) -> T) -> spanned<~[T]> { + fn parse_seq( + bra: &token::Token, + ket: &token::Token, + sep: SeqSep, + f: fn(&Parser) -> T + ) -> spanned<~[T]> { let lo = self.span.lo; self.expect(bra); - let result = self.parse_seq_to_before_end::(ket, sep, f); + let result = self.parse_seq_to_before_end(ket, sep, f); let hi = self.span.hi; self.bump(); - return spanned(lo, hi, result); + spanned(lo, hi, result) } } diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index c928719c2081c..ed71fa411c69f 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -103,8 +103,8 @@ fn dup_string_reader(r: @mut StringReader) -> @mut StringReader { curr: r.curr, filemap: r.filemap, interner: r.interner, - peek_tok: r.peek_tok, - peek_span: r.peek_span + peek_tok: copy r.peek_tok, + peek_span: copy r.peek_span } } @@ -112,9 +112,12 @@ impl reader for StringReader { fn is_eof(@mut self) -> bool { is_eof(self) } // return the next token. EFFECT: advances the string_reader. fn next_token(@mut self) -> TokenAndSpan { - let ret_val = TokenAndSpan {tok: self.peek_tok, sp: self.peek_span}; + let ret_val = TokenAndSpan { + tok: copy self.peek_tok, + sp: copy self.peek_span, + }; string_advance_token(self); - return ret_val; + ret_val } fn fatal(@mut self, m: ~str) -> ! { self.span_diagnostic.span_fatal(copy self.peek_span, m) @@ -122,7 +125,10 @@ impl reader for StringReader { fn span_diag(@mut self) -> span_handler { self.span_diagnostic } pure fn interner(@mut self) -> @token::ident_interner { self.interner } fn peek(@mut self) -> TokenAndSpan { - TokenAndSpan {tok: self.peek_tok, sp: self.peek_span} + TokenAndSpan { + tok: copy self.peek_tok, + sp: copy self.peek_span, + } } fn dup(@mut self) -> reader { dup_string_reader(self) as reader } } @@ -136,7 +142,10 @@ impl reader for TtReader { fn span_diag(@mut self) -> span_handler { self.sp_diag } pure fn interner(@mut self) -> @token::ident_interner { self.interner } fn peek(@mut self) -> TokenAndSpan { - TokenAndSpan { tok: self.cur_tok, sp: self.cur_span } + TokenAndSpan { + tok: copy self.cur_tok, + sp: copy self.cur_span, + } } fn dup(@mut self) -> reader { dup_tt_reader(self) as reader } } @@ -145,8 +154,8 @@ impl reader for TtReader { fn string_advance_token(r: @mut StringReader) { match (consume_whitespace_and_comments(r)) { Some(comment) => { - r.peek_tok = comment.tok; - r.peek_span = comment.sp; + r.peek_tok = copy comment.tok; + r.peek_span = copy comment.sp; }, None => { if is_eof(r) { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 923bc738cf0de..887f064018f37 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -81,70 +81,113 @@ pub fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap) // this appears to be the main entry point for rust parsing by // rustc and crate: -pub fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg, - sess: @mut ParseSess) -> @ast::crate { - let p = new_parser_from_file(sess, cfg, input); - p.parse_crate_mod(cfg) +pub fn parse_crate_from_file( + input: &Path, + cfg: ast::crate_cfg, + sess: @mut ParseSess +) -> @ast::crate { + let p = new_parser_from_file(sess, /*bad*/ copy cfg, input); + p.parse_crate_mod(/*bad*/ copy cfg) // why is there no p.abort_if_errors here? 
} -pub fn parse_crate_from_source_str(name: ~str, - source: @~str, - cfg: ast::crate_cfg, - sess: @mut ParseSess) -> @ast::crate { - let p = new_parser_from_source_str(sess, cfg, name, - codemap::FssNone, source); - maybe_aborted(p.parse_crate_mod(cfg),p) +pub fn parse_crate_from_source_str( + name: ~str, + source: @~str, + cfg: ast::crate_cfg, + sess: @mut ParseSess +) -> @ast::crate { + let p = new_parser_from_source_str( + sess, + /*bad*/ copy cfg, + /*bad*/ copy name, + codemap::FssNone, + source + ); + maybe_aborted(p.parse_crate_mod(/*bad*/ copy cfg),p) } -pub fn parse_expr_from_source_str(name: ~str, - source: @~str, - cfg: ast::crate_cfg, - sess: @mut ParseSess) -> @ast::expr { - let p = new_parser_from_source_str(sess, cfg, name, - codemap::FssNone, source); +pub fn parse_expr_from_source_str( + name: ~str, + source: @~str, + +cfg: ast::crate_cfg, + sess: @mut ParseSess +) -> @ast::expr { + let p = new_parser_from_source_str( + sess, + cfg, + /*bad*/ copy name, + codemap::FssNone, + source + ); maybe_aborted(p.parse_expr(), p) } -pub fn parse_item_from_source_str(name: ~str, - source: @~str, - cfg: ast::crate_cfg, - +attrs: ~[ast::attribute], - sess: @mut ParseSess) - -> Option<@ast::item> { - let p = new_parser_from_source_str(sess, cfg, name, - codemap::FssNone, source); +pub fn parse_item_from_source_str( + name: ~str, + source: @~str, + +cfg: ast::crate_cfg, + +attrs: ~[ast::attribute], + sess: @mut ParseSess +) -> Option<@ast::item> { + let p = new_parser_from_source_str( + sess, + cfg, + /*bad*/ copy name, + codemap::FssNone, + source + ); maybe_aborted(p.parse_item(attrs),p) } -pub fn parse_stmt_from_source_str(name: ~str, - source: @~str, - cfg: ast::crate_cfg, - +attrs: ~[ast::attribute], - sess: @mut ParseSess) -> @ast::stmt { - let p = new_parser_from_source_str(sess, cfg, name, - codemap::FssNone, source); +pub fn parse_stmt_from_source_str( + name: ~str, + source: @~str, + +cfg: ast::crate_cfg, + +attrs: ~[ast::attribute], + sess: @mut ParseSess +) -> @ast::stmt { + let p = new_parser_from_source_str( + sess, + cfg, + /*bad*/ copy name, + codemap::FssNone, + source + ); maybe_aborted(p.parse_stmt(attrs),p) } -pub fn parse_tts_from_source_str(name: ~str, - source: @~str, - cfg: ast::crate_cfg, - sess: @mut ParseSess) -> ~[ast::token_tree] { - let p = new_parser_from_source_str(sess, cfg, name, - codemap::FssNone, source); +pub fn parse_tts_from_source_str( + name: ~str, + source: @~str, + +cfg: ast::crate_cfg, + sess: @mut ParseSess +) -> ~[ast::token_tree] { + let p = new_parser_from_source_str( + sess, + cfg, + /*bad*/ copy name, + codemap::FssNone, + source + ); *p.quote_depth += 1u; maybe_aborted(p.parse_all_token_trees(),p) } -pub fn parse_from_source_str(f: fn (p: Parser) -> T, - name: ~str, ss: codemap::FileSubstr, - source: @~str, cfg: ast::crate_cfg, - sess: @mut ParseSess) - -> T -{ - let p = new_parser_from_source_str(sess, cfg, name, ss, - source); +pub fn parse_from_source_str( + f: fn (Parser) -> T, + name: ~str, ss: codemap::FileSubstr, + source: @~str, + +cfg: ast::crate_cfg, + sess: @mut ParseSess +) -> T { + let p = new_parser_from_source_str( + sess, + cfg, + /*bad*/ copy name, + /*bad*/ copy ss, + source + ); let r = f(p); if !p.reader.is_eof() { p.reader.fatal(~"expected end-of-string"); @@ -160,40 +203,51 @@ pub fn next_node_id(sess: @mut ParseSess) -> node_id { return rv; } -pub fn new_parser_from_source_str(sess: @mut ParseSess, cfg: ast::crate_cfg, - +name: ~str, +ss: codemap::FileSubstr, - source: @~str) -> Parser { +pub fn 
new_parser_from_source_str( + sess: @mut ParseSess, + +cfg: ast::crate_cfg, + +name: ~str, + +ss: codemap::FileSubstr, + source: @~str +) -> Parser { let filemap = sess.cm.new_filemap_w_substr(name, ss, source); - let srdr = lexer::new_string_reader(copy sess.span_diagnostic, - filemap, - sess.interner); - return Parser(sess, cfg, srdr as reader); + let srdr = lexer::new_string_reader( + copy sess.span_diagnostic, + filemap, + sess.interner + ); + Parser(sess, cfg, srdr as reader) } /// Read the entire source file, return a parser /// that draws from that string -pub fn new_parser_result_from_file(sess: @mut ParseSess, - cfg: ast::crate_cfg, - path: &Path) - -> Result { +pub fn new_parser_result_from_file( + sess: @mut ParseSess, + +cfg: ast::crate_cfg, + path: &Path +) -> Result { match io::read_whole_file_str(path) { - result::Ok(src) => { + Ok(src) => { + let filemap = sess.cm.new_filemap(path.to_str(), @src); + let srdr = lexer::new_string_reader( + copy sess.span_diagnostic, + filemap, + sess.interner + ); + Ok(Parser(sess, cfg, srdr as reader)) - let filemap = sess.cm.new_filemap(path.to_str(), @src); - let srdr = lexer::new_string_reader(copy sess.span_diagnostic, - filemap, - sess.interner); - Ok(Parser(sess, cfg, srdr as reader)) - - } - result::Err(e) => Err(e) + } + Err(e) => Err(e) } } /// Create a new parser, handling errors as appropriate /// if the file doesn't exist -pub fn new_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg, - path: &Path) -> Parser { +pub fn new_parser_from_file( + sess: @mut ParseSess, + +cfg: ast::crate_cfg, + path: &Path +) -> Parser { match new_parser_result_from_file(sess, cfg, path) { Ok(parser) => parser, Err(e) => { @@ -204,8 +258,12 @@ pub fn new_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg, /// Create a new parser based on a span from an existing parser. Handles /// error messages correctly when the file does not exist. 
-pub fn new_sub_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg, - path: &Path, sp: span) -> Parser { +pub fn new_sub_parser_from_file( + sess: @mut ParseSess, + +cfg: ast::crate_cfg, + path: &Path, + sp: span +) -> Parser { match new_parser_result_from_file(sess, cfg, path) { Ok(parser) => parser, Err(e) => { @@ -214,11 +272,18 @@ pub fn new_sub_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg, } } -pub fn new_parser_from_tts(sess: @mut ParseSess, cfg: ast::crate_cfg, - tts: ~[ast::token_tree]) -> Parser { - let trdr = lexer::new_tt_reader(copy sess.span_diagnostic, sess.interner, - None, tts); - return Parser(sess, cfg, trdr as reader) +pub fn new_parser_from_tts( + sess: @mut ParseSess, + +cfg: ast::crate_cfg, + +tts: ~[ast::token_tree] +) -> Parser { + let trdr = lexer::new_tt_reader( + copy sess.span_diagnostic, + sess.interner, + None, + tts + ); + Parser(sess, cfg, trdr as reader) } // abort if necessary diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index b384e7ebdd0f4..2b2f1f48034a2 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -218,9 +218,9 @@ pub impl Parser { } fn try_parse_obsolete_priv_section() -> bool { - if self.is_keyword(~"priv") && self.look_ahead(1) == token::LBRACE { - self.obsolete(*self.span, ObsoletePrivSection); - self.eat_keyword(~"priv"); + if self.is_keyword(&~"priv") && self.look_ahead(1) == token::LBRACE { + self.obsolete(copy *self.span, ObsoletePrivSection); + self.eat_keyword(&~"priv"); self.bump(); while *self.token != token::RBRACE { self.parse_single_class_item(ast::private); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 444f1201fc323..4c48b49b5d61e 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -130,31 +130,45 @@ enum view_item_parse_mode { The important thing is to make sure that lookahead doesn't balk at INTERPOLATED tokens */ macro_rules! maybe_whole_expr ( - ($p:expr) => ( match *$p.token { - INTERPOLATED(token::nt_expr(e)) => { - $p.bump(); - return e; - } - INTERPOLATED(token::nt_path(pt)) => { - $p.bump(); - return $p.mk_expr($p.span.lo, $p.span.lo, - expr_path(pt)); - } - _ => () - }) + ($p:expr) => ( + match *$p.token { + INTERPOLATED(token::nt_expr(e)) => { + $p.bump(); + return e; + } + INTERPOLATED(token::nt_path(pt)) => { + $p.bump(); + return $p.mk_expr( + $p.span.lo, + $p.span.hi, + expr_path(pt) + ); + } + _ => () + } + ) ) macro_rules! maybe_whole ( - ($p:expr, $constructor:ident) => ( match *$p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; } + ($p:expr, $constructor:ident) => ( match copy *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); + return x; + } _ => () }) ; - (deref $p:expr, $constructor:ident) => ( match *$p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return *x; } + (deref $p:expr, $constructor:ident) => ( match copy *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); + return copy *x; + } _ => () }) ; - (Some $p:expr, $constructor:ident) => ( match *$p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return Some(x); } + (Some $p:expr, $constructor:ident) => ( match copy *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); + return Some(x); + } _ => () }) ; (iovi $p:expr, $constructor:ident) => ( match *$p.token { @@ -164,9 +178,10 @@ macro_rules! 
maybe_whole ( } _ => () }) ; - (pair_empty $p:expr, $constructor:ident) => ( match *$p.token { + (pair_empty $p:expr, $constructor:ident) => ( match copy *$p.token { INTERPOLATED(token::$constructor(x)) => { - $p.bump(); return (~[], x); + $p.bump(); + return (~[], x); } _ => () }) @@ -193,11 +208,10 @@ struct ParsedItemsAndViewItems { /* ident is handled by common.rs */ pub fn Parser(sess: @mut ParseSess, - cfg: ast::crate_cfg, + +cfg: ast::crate_cfg, +rdr: reader) -> Parser { - let tok0 = rdr.next_token(); - let span0 = tok0.sp; + let tok0 = copy rdr.next_token(); let interner = rdr.interner(); Parser { @@ -205,15 +219,15 @@ pub fn Parser(sess: @mut ParseSess, interner: interner, sess: sess, cfg: cfg, - token: @mut tok0.tok, - span: @mut span0, - last_span: @mut span0, - buffer: @mut [TokenAndSpan {tok: tok0.tok, sp: span0}, ..4], + token: @mut copy tok0.tok, + span: @mut copy tok0.sp, + last_span: @mut copy tok0.sp, + buffer: @mut [copy tok0, .. 4], buffer_start: @mut 0, buffer_end: @mut 0, - tokens_consumed: @mut 0u, + tokens_consumed: @mut 0, restriction: @mut UNRESTRICTED, - quote_depth: @mut 0u, + quote_depth: @mut 0, keywords: token::keyword_table(), strict_keywords: token::strict_keyword_table(), reserved_keywords: token::reserved_keyword_table(), @@ -255,20 +269,20 @@ impl Drop for Parser { pub impl Parser { // advance the parser by one token fn bump() { - *self.last_span = *self.span; + *self.last_span = copy *self.span; let next = if *self.buffer_start == *self.buffer_end { self.reader.next_token() } else { - let next = self.buffer[*self.buffer_start]; + let next = copy self.buffer[*self.buffer_start]; *self.buffer_start = (*self.buffer_start + 1) & 3; next }; - *self.token = next.tok; - *self.span = next.sp; + *self.token = copy next.tok; + *self.span = copy next.sp; *self.tokens_consumed += 1u; } // EFFECT: replace the current token and span with the given one - fn replace_token(next: token::Token, +lo: BytePos, +hi: BytePos) { + fn replace_token(+next: token::Token, +lo: BytePos, +hi: BytePos) { *self.token = next; *self.span = mk_sp(lo, hi); } @@ -311,11 +325,11 @@ pub impl Parser { pure fn id_to_str(id: ident) -> @~str { self.sess.interner.get(id) } - fn token_is_closure_keyword(+tok: token::Token) -> bool { - self.token_is_keyword(~"pure", tok) || - self.token_is_keyword(~"unsafe", tok) || - self.token_is_keyword(~"once", tok) || - self.token_is_keyword(~"fn", tok) + fn token_is_closure_keyword(tok: &token::Token) -> bool { + self.token_is_keyword(&~"pure", tok) || + self.token_is_keyword(&~"unsafe", tok) || + self.token_is_keyword(&~"once", tok) || + self.token_is_keyword(&~"fn", tok) } fn parse_ty_bare_fn() -> ty_ @@ -335,7 +349,7 @@ pub impl Parser { */ let purity = self.parse_purity(); - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); return ty_bare_fn(@TyBareFn { abi: RustAbi, purity: purity, @@ -366,7 +380,7 @@ pub impl Parser { let purity = self.parse_purity(); let onceness = parse_onceness(&self); - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); let post_sigil = self.parse_fn_ty_sigil(); let sigil = match (pre_sigil, post_sigil) { @@ -394,14 +408,14 @@ pub impl Parser { }); fn parse_onceness(self: &Parser) -> Onceness { - if self.eat_keyword(~"once") {Once} else {Many} + if self.eat_keyword(&~"once") { Once } else { Many } } } fn parse_purity() -> purity { - if self.eat_keyword(~"pure") { + if self.eat_keyword(&~"pure") { return pure_fn; - } else if self.eat_keyword(~"unsafe") { + } else if self.eat_keyword(&~"unsafe") { return unsafe_fn; } 
else { return impure_fn; @@ -419,21 +433,26 @@ pub impl Parser { Lifetimes */ - if self.eat(token::LT) { + if self.eat(&token::LT) { let _lifetimes = self.parse_lifetimes(); - self.expect(token::GT); + self.expect(&token::GT); } let inputs = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_arg_general(false)); + |p| p.parse_arg_general(false) + ); let (ret_style, ret_ty) = self.parse_ret_ty(); ast::fn_decl { inputs: inputs, output: ret_ty, cf: ret_style } } fn parse_trait_methods() -> ~[trait_method] { - do self.parse_unspanned_seq(token::LBRACE, token::RBRACE, - seq_sep_none()) |p| { + do self.parse_unspanned_seq( + &token::LBRACE, + &token::RBRACE, + seq_sep_none() + ) |p| { let attrs = p.parse_outer_attributes(); let lo = p.span.lo; let is_static = p.parse_staticness(); @@ -458,7 +477,7 @@ pub impl Parser { let hi = p.last_span.hi; debug!("parse_trait_methods(): trait method signature ends in \ `%s`", - token_to_str(p.reader, *p.token)); + token_to_str(p.reader, © *p.token)); match *p.token { token::SEMI => { p.bump(); @@ -496,8 +515,13 @@ pub impl Parser { }) } - _ => { p.fatal(~"expected `;` or `}` but found `" + - token_to_str(p.reader, *p.token) + ~"`"); + _ => { + p.fatal( + fmt!( + "expected `;` or `}` but found `%s`", + token_to_str(p.reader, © *p.token) + ) + ); } } } @@ -514,7 +538,7 @@ pub impl Parser { let lo = self.span.lo; let mutbl = self.parse_mutability(); let id = self.parse_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); spanned( lo, @@ -527,9 +551,9 @@ pub impl Parser { } fn parse_ret_ty() -> (ret_style, @Ty) { - return if self.eat(token::RARROW) { + return if self.eat(&token::RARROW) { let lo = self.span.lo; - if self.eat(token::NOT) { + if self.eat(&token::NOT) { ( noreturn, @Ty { @@ -567,7 +591,7 @@ pub impl Parser { // Parses something like "&x" fn parse_region() -> @region { - self.expect(token::BINOP(token::AND)); + self.expect(&token::BINOP(token::AND)); match *self.token { token::IDENT(sid, _) => { @@ -605,9 +629,12 @@ pub impl Parser { one_tuple = true; } } - let t = if ts.len() == 1 && !one_tuple { ts[0].node } - else { ty_tup(ts) }; - self.expect(token::RPAREN); + let t = if ts.len() == 1 && !one_tuple { + copy ts[0].node + } else { + ty_tup(ts) + }; + self.expect(&token::RPAREN); t } } else if *self.token == token::AT { @@ -621,15 +648,17 @@ pub impl Parser { ty_ptr(self.parse_mt()) } else if *self.token == token::LBRACE { let elems = self.parse_unspanned_seq( - token::LBRACE, token::RBRACE, + &token::LBRACE, + &token::RBRACE, seq_sep_trailing_allowed(token::COMMA), - |p| p.parse_ty_field()); - if vec::len(elems) == 0u { - self.unexpected_last(token::RBRACE); + |p| p.parse_ty_field() + ); + if elems.len() == 0 { + self.unexpected_last(&token::RBRACE); } ty_rec(elems) } else if *self.token == token::LBRACKET { - self.expect(token::LBRACKET); + self.expect(&token::LBRACKET); let mt = self.parse_mt(); if mt.mutbl == m_mutbl { // `m_const` too after snapshot self.obsolete(*self.last_span, ObsoleteMutVector); @@ -640,20 +669,22 @@ pub impl Parser { None => ty_vec(mt), Some(suffix) => ty_fixed_length_vec(mt, suffix) }; - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); t } else if *self.token == token::BINOP(token::AND) { self.bump(); self.parse_borrowed_pointee() - } else if self.eat_keyword(~"extern") { + } else if self.eat_keyword(&~"extern") { self.parse_ty_bare_fn() - } else if 
self.token_is_closure_keyword(*self.token) { + } else if self.token_is_closure_keyword(© *self.token) { self.parse_ty_closure(None, None) } else if *self.token == token::MOD_SEP - || is_ident_or_path(*self.token) { + || is_ident_or_path(&*self.token) { let path = self.parse_path_with_tps(colons_before_params); ty_path(path, self.get_id()) - } else { self.fatal(~"expected type"); }; + } else { + self.fatal(~"expected type"); + }; let sp = mk_sp(lo, self.last_span.hi); @Ty {id: self.get_id(), node: t, span: sp} @@ -672,10 +703,12 @@ pub impl Parser { token::IDENT(rname, _) => { if self.look_ahead(1u) == token::BINOP(token::SLASH) && - self.token_is_closure_keyword(self.look_ahead(2u)) { - self.bump(); self.bump(); + self.token_is_closure_keyword(&self.look_ahead(2u)) + { + self.bump(); + self.bump(); return self.parse_ty_closure(Some(sigil), Some(rname)); - } else if self.token_is_closure_keyword(*self.token) { + } else if self.token_is_closure_keyword(© *self.token) { return self.parse_ty_closure(Some(sigil), None); } } @@ -715,7 +748,7 @@ pub impl Parser { _ => { None } }; - if self.token_is_closure_keyword(*self.token) { + if self.token_is_closure_keyword(© *self.token) { return self.parse_ty_closure(Some(BorrowedSigil), rname); } @@ -725,12 +758,12 @@ pub impl Parser { } fn parse_arg_mode() -> mode { - if self.eat(token::BINOP(token::MINUS)) { + if self.eat(&token::BINOP(token::MINUS)) { expl(by_copy) // NDM outdated syntax - } else if self.eat(token::ANDAND) { + } else if self.eat(&token::ANDAND) { expl(by_ref) - } else if self.eat(token::BINOP(token::PLUS)) { - if self.eat(token::BINOP(token::PLUS)) { + } else if self.eat(&token::BINOP(token::PLUS)) { + if self.eat(&token::BINOP(token::PLUS)) { expl(by_val) } else { expl(by_copy) @@ -755,10 +788,10 @@ pub impl Parser { } } else { 0 }; if offset == 0 { - is_plain_ident(*self.token) + is_plain_ident(&*self.token) && self.look_ahead(1) == token::COLON } else { - is_plain_ident(self.look_ahead(offset)) + is_plain_ident(&self.look_ahead(offset)) && self.look_ahead(offset + 1) == token::COLON } } @@ -770,9 +803,9 @@ pub impl Parser { let mut is_mutbl = false; let pat = if require_name || self.is_named_argument() { m = self.parse_arg_mode(); - is_mutbl = self.eat_keyword(~"mut"); + is_mutbl = self.eat_keyword(&~"mut"); let pat = self.parse_pat(false); - self.expect(token::COLON); + self.expect(&token::COLON); pat } else { m = infer(self.get_id()); @@ -793,9 +826,9 @@ pub impl Parser { fn parse_fn_block_arg() -> arg_or_capture_item { let m = self.parse_arg_mode(); - let is_mutbl = self.eat_keyword(~"mut"); + let is_mutbl = self.eat_keyword(&~"mut"); let pat = self.parse_pat(false); - let t = if self.eat(token::COLON) { + let t = if self.eat(&token::COLON) { self.parse_ty(false) } else { @Ty { @@ -814,7 +847,7 @@ pub impl Parser { } fn maybe_parse_fixed_vstore_with_star() -> Option { - if self.eat(token::BINOP(token::STAR)) { + if self.eat(&token::BINOP(token::STAR)) { match *self.token { token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => { self.bump(); @@ -822,9 +855,12 @@ pub impl Parser { } _ => { self.fatal( - fmt!("expected integral vector length \ - but found `%s`", - token_to_str(self.reader, *self.token))); + fmt!( + "expected integral vector length \ + but found `%s`", + token_to_str(self.reader, © *self.token) + ) + ); } } } else { @@ -832,31 +868,31 @@ pub impl Parser { } } - fn lit_from_token(tok: token::Token) -> lit_ { - match tok { - token::LIT_INT(i, it) => lit_int(i, it), - token::LIT_UINT(u, ut) => lit_uint(u, ut), - 
token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i), - token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft), - token::LIT_FLOAT_UNSUFFIXED(s) => - lit_float_unsuffixed(self.id_to_str(s)), - token::LIT_STR(s) => lit_str(self.id_to_str(s)), - token::LPAREN => { self.expect(token::RPAREN); lit_nil }, - _ => { self.unexpected_last(tok); } + fn lit_from_token(tok: &token::Token) -> lit_ { + match *tok { + token::LIT_INT(i, it) => lit_int(i, it), + token::LIT_UINT(u, ut) => lit_uint(u, ut), + token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i), + token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft), + token::LIT_FLOAT_UNSUFFIXED(s) => + lit_float_unsuffixed(self.id_to_str(s)), + token::LIT_STR(s) => lit_str(self.id_to_str(s)), + token::LPAREN => { self.expect(&token::RPAREN); lit_nil }, + _ => { self.unexpected_last(tok); } } } fn parse_lit() -> lit { let lo = self.span.lo; - let lit = if self.eat_keyword(~"true") { + let lit = if self.eat_keyword(&~"true") { lit_bool(true) - } else if self.eat_keyword(~"false") { + } else if self.eat_keyword(&~"false") { lit_bool(false) } else { // XXX: This is a really bad copy! - let tok = *self.token; + let tok = copy *self.token; self.bump(); - self.lit_from_token(tok) + self.lit_from_token(&tok) }; codemap::spanned { node: lit, span: mk_sp(lo, self.last_span.hi) } } @@ -867,12 +903,12 @@ pub impl Parser { } fn parse_path_without_tps_( - parse_ident: fn(Parser) -> ident, - parse_last_ident: fn(Parser) -> ident) -> @path { + parse_ident: fn(&Parser) -> ident, + parse_last_ident: fn(&Parser) -> ident) -> @path { maybe_whole!(self, nt_path); let lo = self.span.lo; - let global = self.eat(token::MOD_SEP); + let global = self.eat(&token::MOD_SEP); let mut ids = ~[]; loop { let is_not_last = @@ -880,10 +916,10 @@ pub impl Parser { && self.look_ahead(1u) == token::MOD_SEP; if is_not_last { - ids.push(parse_ident(self)); - self.expect(token::MOD_SEP); + ids.push(parse_ident(&self)); + self.expect(&token::MOD_SEP); } else { - ids.push(parse_last_ident(self)); + ids.push(parse_last_ident(&self)); break; } } @@ -905,7 +941,7 @@ pub impl Parser { maybe_whole!(self, nt_path); let lo = self.span.lo; let path = self.parse_path_without_tps(); - if colons && !self.eat(token::MOD_SEP) { + if colons && !self.eat(&token::MOD_SEP) { return path; } @@ -919,7 +955,8 @@ pub impl Parser { // ought to and have to sort it out later. if *self.token == token::BINOP(token::SLASH) && self.look_ahead(1u) == token::BINOP(token::AND) { - self.expect(token::BINOP(token::SLASH)); + + self.expect(&token::BINOP(token::SLASH)); Some(self.parse_region()) } else { None @@ -933,7 +970,7 @@ pub impl Parser { @ast::path { span: mk_sp(lo, hi), rp: rp, types: tps, - .. *path } + .. 
copy *path } } fn parse_opt_lifetime() -> Option { @@ -1004,9 +1041,9 @@ pub impl Parser { } fn parse_mutability() -> mutability { - if self.eat_keyword(~"mut") { + if self.eat_keyword(&~"mut") { m_mutbl - } else if self.eat_keyword(~"const") { + } else if self.eat_keyword(&~"const") { m_const } else { m_imm @@ -1017,7 +1054,7 @@ pub impl Parser { let lo = self.span.lo; let m = self.parse_mutability(); let i = self.parse_ident(); - self.expect(sep); + self.expect(&sep); let e = self.parse_expr(); spanned(lo, e.span.hi, ast::field_ { mutbl: m, ident: i, expr: e }) } @@ -1031,7 +1068,7 @@ pub impl Parser { } } - fn mk_mac_expr(+lo: BytePos, +hi: BytePos, m: mac_) -> @expr { + fn mk_mac_expr(+lo: BytePos, +hi: BytePos, +m: mac_) -> @expr { @expr { id: self.get_id(), callee_id: self.get_id(), @@ -1084,7 +1121,7 @@ pub impl Parser { } } hi = self.span.hi; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); return if es.len() == 1 && !one_tuple { self.mk_expr(lo, self.span.hi, expr_paren(es[0])) @@ -1097,23 +1134,23 @@ pub impl Parser { let blk = self.parse_block_tail(lo, default_blk); return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); - } else if token::is_bar(*self.token) { + } else if token::is_bar(&*self.token) { return self.parse_lambda_expr(); - } else if self.eat_keyword(~"if") { + } else if self.eat_keyword(&~"if") { return self.parse_if_expr(); - } else if self.eat_keyword(~"for") { + } else if self.eat_keyword(&~"for") { return self.parse_sugary_call_expr(~"for", ForSugar, expr_loop_body); - } else if self.eat_keyword(~"do") { + } else if self.eat_keyword(&~"do") { return self.parse_sugary_call_expr(~"do", DoSugar, expr_do_body); - } else if self.eat_keyword(~"while") { + } else if self.eat_keyword(&~"while") { return self.parse_while_expr(); - } else if self.eat_keyword(~"loop") { + } else if self.eat_keyword(&~"loop") { return self.parse_loop_expr(); - } else if self.eat_keyword(~"match") { + } else if self.eat_keyword(&~"match") { return self.parse_match_expr(); - } else if self.eat_keyword(~"fn") { + } else if self.eat_keyword(&~"fn") { let opt_sigil = self.parse_fn_ty_sigil(); let sigil = match opt_sigil { None => { @@ -1122,7 +1159,7 @@ pub impl Parser { Some(p) => { p } }; return self.parse_fn_expr(sigil); - } else if self.eat_keyword(~"unsafe") { + } else if self.eat_keyword(&~"unsafe") { return self.parse_block_expr(lo, unsafe_blk); } else if *self.token == token::LBRACKET { self.bump(); @@ -1144,70 +1181,73 @@ pub impl Parser { self.bump(); self.bump(); let count = self.parse_expr(); - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); ex = expr_repeat(first_expr, count, mutbl); } else if *self.token == token::COMMA { // Vector with two or more elements. self.bump(); - let remaining_exprs = - self.parse_seq_to_end(token::RBRACKET, - seq_sep_trailing_allowed(token::COMMA), - |p| p.parse_expr()); + let remaining_exprs = self.parse_seq_to_end( + &token::RBRACKET, + seq_sep_trailing_allowed(token::COMMA), + |p| p.parse_expr() + ); ex = expr_vec(~[first_expr] + remaining_exprs, mutbl); } else { // Vector with one element. 
- self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); ex = expr_vec(~[first_expr], mutbl); } } hi = self.span.hi; - } else if self.eat_keyword(~"log") { - self.expect(token::LPAREN); + } else if self.eat_keyword(&~"log") { + self.expect(&token::LPAREN); let lvl = self.parse_expr(); - self.expect(token::COMMA); + self.expect(&token::COMMA); let e = self.parse_expr(); ex = expr_log(ast::log_other, lvl, e); hi = self.span.hi; - self.expect(token::RPAREN); - } else if self.eat_keyword(~"assert") { + self.expect(&token::RPAREN); + } else if self.eat_keyword(&~"assert") { let e = self.parse_expr(); ex = expr_assert(e); hi = e.span.hi; - } else if self.eat_keyword(~"return") { - if can_begin_expr(*self.token) { + } else if self.eat_keyword(&~"return") { + if can_begin_expr(&*self.token) { let e = self.parse_expr(); hi = e.span.hi; ex = expr_ret(Some(e)); } else { ex = expr_ret(None); } - } else if self.eat_keyword(~"break") { - if is_ident(*self.token) { + } else if self.eat_keyword(&~"break") { + if is_ident(&*self.token) { ex = expr_break(Some(self.parse_ident())); } else { ex = expr_break(None); } hi = self.span.hi; - } else if self.eat_keyword(~"copy") { + } else if self.eat_keyword(&~"copy") { let e = self.parse_expr(); ex = expr_copy(e); hi = e.span.hi; } else if *self.token == token::MOD_SEP || - is_ident(*self.token) && !self.is_keyword(~"true") && - !self.is_keyword(~"false") { + is_ident(&*self.token) && !self.is_keyword(&~"true") && + !self.is_keyword(&~"false") { let pth = self.parse_path_with_tps(true); /* `!`, as an operator, is prefix, so we know this isn't that */ if *self.token == token::NOT { self.bump(); match *self.token { - token::LPAREN | token::LBRACE => {} - _ => self.fatal(~"expected open delimiter") + token::LPAREN | token::LBRACE => {} + _ => self.fatal(~"expected open delimiter") }; - let ket = token::flip_delimiter(*self.token); - let tts = self.parse_unspanned_seq(*self.token, - ket, - seq_sep_none(), - |p| p.parse_token_tree()); + let ket = token::flip_delimiter(&*self.token); + let tts = self.parse_unspanned_seq( + © *self.token, + &ket, + seq_sep_none(), + |p| p.parse_token_tree() + ); let hi = self.span.hi; return self.mk_mac_expr(lo, hi, mac_invoc_tt(pth, tts)); @@ -1225,9 +1265,9 @@ pub impl Parser { break; } - self.expect(token::COMMA); + self.expect(&token::COMMA); - if self.eat(token::DOTDOT) { + if self.eat(&token::DOTDOT) { base = Some(self.parse_expr()); break; } @@ -1240,7 +1280,7 @@ pub impl Parser { } hi = pth.span.hi; - self.expect(token::RBRACE); + self.expect(&token::RBRACE); ex = expr_struct(pth, fields, base); return self.mk_expr(lo, hi, ex); } @@ -1258,7 +1298,7 @@ pub impl Parser { } fn parse_block_expr(lo: BytePos, blk_mode: blk_check_mode) -> @expr { - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let blk = self.parse_block_tail(lo, blk_mode); return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); } @@ -1278,13 +1318,13 @@ pub impl Parser { let mut hi; loop { // expr.f - if self.eat(token::DOT) { + if self.eat(&token::DOT) { match *self.token { token::IDENT(i, _) => { hi = self.span.hi; self.bump(); - let tys = if self.eat(token::MOD_SEP) { - self.expect(token::LT); + let tys = if self.eat(&token::MOD_SEP) { + self.expect(&token::LT); self.parse_generic_values_after_lt() } else { ~[] @@ -1294,9 +1334,11 @@ pub impl Parser { match *self.token { token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + &token::LPAREN, + &token::RPAREN, 
seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_expr()); + |p| p.parse_expr() + ); hi = self.span.hi; let nd = expr_method_call(e, i, tys, es, NoSugar); @@ -1316,9 +1358,11 @@ pub impl Parser { // expr(...) token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_expr()); + |p| p.parse_expr() + ); hi = self.span.hi; let nd = expr_call(e, es, NoSugar); @@ -1330,7 +1374,7 @@ pub impl Parser { self.bump(); let ix = self.parse_expr(); hi = ix.span.hi; - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); e = self.mk_expr(lo, hi, expr_index(e, ix)); } @@ -1347,15 +1391,15 @@ pub impl Parser { || *self.token == token::BINOP(token::PLUS) { let zerok = *self.token == token::BINOP(token::STAR); self.bump(); - return (None, zerok); + (None, zerok) } else { - let sep = *self.token; + let sep = copy *self.token; self.bump(); if *self.token == token::BINOP(token::STAR) || *self.token == token::BINOP(token::PLUS) { let zerok = *self.token == token::BINOP(token::STAR); self.bump(); - return (Some(sep), zerok); + (Some(sep), zerok) } else { self.fatal(~"expected `*` or `+`"); } @@ -1366,13 +1410,17 @@ pub impl Parser { fn parse_token_tree() -> token_tree { maybe_whole!(deref self, nt_tt); - fn parse_non_delim_tt_tok(p: Parser) -> token_tree { + fn parse_non_delim_tt_tok(p: &Parser) -> token_tree { maybe_whole!(deref p, nt_tt); match *p.token { token::RPAREN | token::RBRACE | token::RBRACKET => { - p.fatal(~"incorrect close delimiter: `" - + token_to_str(p.reader, *p.token) + ~"`"); + p.fatal( + fmt!( + "incorrect close delimiter: `%s`", + token_to_str(p.reader, © *p.token) + ) + ); } /* we ought to allow different depths of unquotation */ token::DOLLAR if *p.quote_depth > 0u => { @@ -1380,11 +1428,19 @@ pub impl Parser { let sp = *p.span; if *p.token == token::LPAREN { - let seq = p.parse_seq(token::LPAREN, token::RPAREN, - seq_sep_none(), - |p| p.parse_token_tree()); + let seq = p.parse_seq( + &token::LPAREN, + &token::RPAREN, + seq_sep_none(), + |p| p.parse_token_tree() + ); let (s, z) = p.parse_sep_and_zerok(); - tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z) + tt_seq( + mk_sp(sp.lo ,p.span.hi), + /*bad*/ copy seq.node, + s, + z + ) } else { tt_nonterminal(sp, p.parse_ident()) } @@ -1396,30 +1452,36 @@ pub impl Parser { } // turn the next token into a tt_tok: - fn parse_any_tt_tok(p: Parser) -> token_tree{ - let res = tt_tok(*p.span, *p.token); + fn parse_any_tt_tok(p: &Parser) -> token_tree{ + let res = tt_tok(*p.span, copy *p.token); p.bump(); res } match *self.token { - token::EOF => { + token::EOF => { self.fatal(~"file ended in the middle of a macro invocation"); - } - token::LPAREN | token::LBRACE | token::LBRACKET => { - // tjc: ?????? - let ket = token::flip_delimiter(*self.token); - tt_delim(vec::append( - // the open delimiter: - ~[parse_any_tt_tok(self)], - vec::append( - self.parse_seq_to_before_end( - ket, seq_sep_none(), - |p| p.parse_token_tree()), - // the close delimiter: - ~[parse_any_tt_tok(self)]))) - } - _ => parse_non_delim_tt_tok(self) + } + token::LPAREN | token::LBRACE | token::LBRACKET => { + // tjc: ?????? 
+ let ket = token::flip_delimiter(&*self.token); + tt_delim( + vec::append( + // the open delimiter: + ~[parse_any_tt_tok(&self)], + vec::append( + self.parse_seq_to_before_end( + &ket, + seq_sep_none(), + |p| p.parse_token_tree() + ), + // the close delimiter: + ~[parse_any_tt_tok(&self)] + ) + ) + ) + } + _ => parse_non_delim_tt_tok(&self) } } @@ -1436,13 +1498,16 @@ pub impl Parser { // the interpolation of matchers maybe_whole!(self, nt_matchers); let name_idx = @mut 0u; - return match *self.token { - token::LBRACE | token::LPAREN | token::LBRACKET => { - self.parse_matcher_subseq(name_idx, *self.token, - // tjc: not sure why we need a copy - token::flip_delimiter(*self.token)) - } - _ => self.fatal(~"expected open delimiter") + match *self.token { + token::LBRACE | token::LPAREN | token::LBRACKET => { + self.parse_matcher_subseq( + name_idx, + &*self.token, + // tjc: not sure why we need a copy + &token::flip_delimiter(&*self.token) + ) + } + _ => self.fatal(~"expected open delimiter") } } @@ -1450,14 +1515,17 @@ pub impl Parser { // This goofy function is necessary to correctly match parens in matchers. // Otherwise, `$( ( )` would be a valid matcher, and `$( () )` would be // invalid. It's similar to common::parse_seq. - fn parse_matcher_subseq(name_idx: @mut uint, bra: token::Token, - ket: token::Token) -> ~[matcher] { + fn parse_matcher_subseq( + name_idx: @mut uint, + bra: &token::Token, + ket: &token::Token + ) -> ~[matcher] { let mut ret_val = ~[]; let mut lparens = 0u; self.expect(bra); - while *self.token != ket || lparens > 0u { + while *self.token != *ket || lparens > 0u { if *self.token == token::LPAREN { lparens += 1u; } if *self.token == token::RPAREN { lparens -= 1u; } ret_val.push(self.parse_matcher(name_idx)); @@ -1475,9 +1543,11 @@ pub impl Parser { self.bump(); if *self.token == token::LPAREN { let name_idx_lo = *name_idx; - let ms = self.parse_matcher_subseq(name_idx, - token::LPAREN, - token::RPAREN); + let ms = self.parse_matcher_subseq( + name_idx, + &token::LPAREN, + &token::RPAREN + ); if ms.len() == 0u { self.fatal(~"repetition body must be nonempty"); } @@ -1485,14 +1555,14 @@ pub impl Parser { match_seq(ms, sep, zerok, name_idx_lo, *name_idx) } else { let bound_to = self.parse_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let nt_name = self.parse_ident(); let m = match_nonterminal(bound_to, nt_name, *name_idx); *name_idx += 1u; m } } else { - let m = match_tok(*self.token); + let m = match_tok(copy *self.token); self.bump(); m }; @@ -1599,7 +1669,7 @@ pub impl Parser { fn parse_more_binops(lhs: @expr, min_prec: uint) -> @expr { if self.expr_is_complete(lhs) { return lhs; } - let peeked = *self.token; + let peeked = copy *self.token; if peeked == token::BINOP(token::OR) && (*self.restriction == RESTRICT_NO_BAR_OP || *self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) { @@ -1625,7 +1695,7 @@ pub impl Parser { } } None => { - if as_prec > min_prec && self.eat_keyword(~"as") { + if as_prec > min_prec && self.eat_keyword(&~"as") { let rhs = self.parse_ty(true); let _as = self.mk_expr(lhs.span.lo, rhs.span.hi, @@ -1697,7 +1767,7 @@ pub impl Parser { let thn = self.parse_block(); let mut els: Option<@expr> = None; let mut hi = thn.span.hi; - if self.eat_keyword(~"else") { + if self.eat_keyword(&~"else") { let elexpr = self.parse_else_expr(); els = Some(elexpr); hi = elexpr.span.hi; @@ -1771,7 +1841,7 @@ pub impl Parser { } fn parse_else_expr() -> @expr { - if self.eat_keyword(~"if") { + if self.eat_keyword(&~"if") { return 
self.parse_if_expr(); } else { let blk = self.parse_block(); @@ -1793,7 +1863,7 @@ pub impl Parser { // Turn on the restriction to stop at | or || so we can parse // them as the lambda arguments let e = self.parse_expr_res(RESTRICT_NO_BAR_OR_DOUBLEBAR_OP); - match e.node { + match /*bad*/ copy e.node { expr_call(f, args, NoSugar) => { let block = self.parse_lambda_block_expr(); let last_arg = self.mk_expr(block.span.lo, block.span.hi, @@ -1848,12 +1918,12 @@ pub impl Parser { // loop headers look like 'loop {' or 'loop unsafe {' let is_loop_header = *self.token == token::LBRACE - || (is_ident(*self.token) + || (is_ident(&*self.token) && self.look_ahead(1) == token::LBRACE); // labeled loop headers look like 'loop foo: {' let is_labeled_loop_header = - is_ident(*self.token) - && !self.is_any_keyword(*self.token) + is_ident(&*self.token) + && !self.is_any_keyword(© *self.token) && self.look_ahead(1) == token::COLON; if is_loop_header || is_labeled_loop_header { @@ -1861,7 +1931,7 @@ pub impl Parser { let opt_ident; if is_labeled_loop_header { opt_ident = Some(self.parse_ident()); - self.expect(token::COLON); + self.expect(&token::COLON); } else { opt_ident = None; } @@ -1873,7 +1943,7 @@ pub impl Parser { } else { // This is a 'continue' expression let lo = self.span.lo; - let ex = if is_ident(*self.token) { + let ex = if is_ident(&*self.token) { expr_again(Some(self.parse_ident())) } else { expr_again(None) @@ -1887,13 +1957,13 @@ pub impl Parser { fn looking_at_record_literal() -> bool { let lookahead = self.look_ahead(1); *self.token == token::LBRACE && - (self.token_is_keyword(~"mut", lookahead) || - (is_plain_ident(lookahead) && + (self.token_is_keyword(&~"mut", &lookahead) || + (is_plain_ident(&lookahead) && self.look_ahead(2) == token::COLON)) } fn parse_record_literal() -> expr_ { - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let mut fields = ~[self.parse_field(token::COLON)]; let mut base = None; while *self.token != token::RBRACE { @@ -1908,14 +1978,14 @@ pub impl Parser { break; } - self.expect(token::COMMA); + self.expect(&token::COMMA); if *self.token == token::RBRACE { // record ends by an optional trailing comma break; } fields.push(self.parse_field(token::COLON)); } - self.expect(token::RBRACE); + self.expect(&token::RBRACE); self.warn(~"REC"); return expr_rec(fields, base); } @@ -1923,13 +1993,13 @@ pub impl Parser { fn parse_match_expr() -> @expr { let lo = self.last_span.lo; let discriminant = self.parse_expr(); - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let mut arms: ~[arm] = ~[]; while *self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = None; - if self.eat_keyword(~"if") { guard = Some(self.parse_expr()); } - self.expect(token::FAT_ARROW); + if self.eat_keyword(&~"if") { guard = Some(self.parse_expr()); } + self.expect(&token::FAT_ARROW); let expr = self.parse_expr_res(RESTRICT_STMT_EXPR); let require_comma = @@ -1937,9 +2007,9 @@ pub impl Parser { && *self.token != token::RBRACE; if require_comma { - self.expect(token::COMMA); + self.expect(&token::COMMA); } else { - self.eat(token::COMMA); + self.eat(&token::COMMA); } let blk = codemap::spanned { @@ -2008,7 +2078,7 @@ pub impl Parser { while *self.token != token::RBRACKET { if first { first = false; } - else { self.expect(token::COMMA); } + else { self.expect(&token::COMMA); } let mut is_tail = false; if *self.token == token::DOTDOT { @@ -2040,14 +2110,17 @@ pub impl Parser { let mut first = true; while *self.token != token::RBRACE { if first { first = false; } 
- else { self.expect(token::COMMA); } + else { self.expect(&token::COMMA); } if *self.token == token::UNDERSCORE { self.bump(); if *self.token != token::RBRACE { - self.fatal(~"expected `}`, found `" + - token_to_str(self.reader, *self.token) + - ~"`"); + self.fatal( + fmt!( + "expected `}`, found `%s`", + token_to_str(self.reader, © *self.token) + ) + ); } etc = true; break; @@ -2084,7 +2157,7 @@ pub impl Parser { let lo = self.span.lo; let mut hi = self.span.hi; let mut pat; - match *self.token { + match copy *self.token { token::UNDERSCORE => { self.bump(); pat = pat_wild; } token::AT => { self.bump(); @@ -2129,7 +2202,6 @@ pub impl Parser { } _ => pat_uniq(sub) }; - } token::BINOP(token::AND) => { let lo = self.span.lo; @@ -2178,9 +2250,9 @@ pub impl Parser { fields.push(self.parse_pat(refutable)); } } - if fields.len() == 1 { self.expect(token::COMMA); } + if fields.len() == 1 { self.expect(&token::COMMA); } hi = self.span.hi; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); pat = pat_tup(fields); } } @@ -2188,25 +2260,25 @@ pub impl Parser { self.bump(); let (elements, tail) = self.parse_pat_vec_elements(refutable); hi = self.span.hi; - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); pat = ast::pat_vec(elements, tail); } copy tok => { - if !is_ident_or_path(tok) - || self.is_keyword(~"true") - || self.is_keyword(~"false") + if !is_ident_or_path(&tok) + || self.is_keyword(&~"true") + || self.is_keyword(&~"false") { let val = self.parse_expr_res(RESTRICT_NO_BAR_OP); - if self.eat(token::DOTDOT) { + if self.eat(&token::DOTDOT) { let end = self.parse_expr_res(RESTRICT_NO_BAR_OP); pat = pat_range(val, end); } else { pat = pat_lit(val); } - } else if self.eat_keyword(~"ref") { + } else if self.eat_keyword(&~"ref") { let mutbl = self.parse_mutability(); pat = self.parse_pat_ident(refutable, bind_by_ref(mutbl)); - } else if self.eat_keyword(~"copy") { + } else if self.eat_keyword(&~"copy") { pat = self.parse_pat_ident(refutable, bind_by_copy); } else { // XXX---refutable match bindings should work same as let @@ -2222,10 +2294,10 @@ pub impl Parser { cannot_be_enum_or_struct = true } - if is_plain_ident(*self.token) && cannot_be_enum_or_struct { + if is_plain_ident(&*self.token) && cannot_be_enum_or_struct { let name = self.parse_value_path(); let sub; - if self.eat(token::AT) { + if self.eat(&token::AT) { sub = Some(self.parse_pat(refutable)); } else { sub = None; @@ -2250,14 +2322,17 @@ pub impl Parser { // This is a "top constructor only" pat self.bump(); self.bump(); star_pat = true; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); } _ => { args = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, - seq_sep_trailing_disallowed - (token::COMMA), - |p| p.parse_pat(refutable)); + &token::LPAREN, + &token::RPAREN, + seq_sep_trailing_disallowed( + token::COMMA + ), + |p| p.parse_pat(refutable) + ); } }, _ => () @@ -2288,13 +2363,13 @@ pub impl Parser { fn parse_pat_ident(refutable: bool, binding_mode: ast::binding_mode) -> ast::pat_ { - if !is_plain_ident(*self.token) { + if !is_plain_ident(&*self.token) { self.span_fatal( *self.last_span, ~"expected identifier, found path"); } let name = self.parse_value_path(); - let sub = if self.eat(token::AT) { + let sub = if self.eat(&token::AT) { Some(self.parse_pat(refutable)) } else { None }; @@ -2322,7 +2397,7 @@ pub impl Parser { node: ty_infer, span: mk_sp(lo, lo), }; - if self.eat(token::COLON) { ty = self.parse_ty(false); } + if self.eat(&token::COLON) { ty = self.parse_ty(false); } let init = if 
allow_init { self.parse_initializer() } else { None }; @spanned( lo, @@ -2338,10 +2413,10 @@ pub impl Parser { } fn parse_let() -> @decl { - let is_mutbl = self.eat_keyword(~"mut"); + let is_mutbl = self.eat_keyword(&~"mut"); let lo = self.span.lo; let mut locals = ~[self.parse_local(is_mutbl, true)]; - while self.eat(token::COMMA) { + while self.eat(&token::COMMA) { locals.push(self.parse_local(is_mutbl, true)); } return @spanned(lo, self.last_span.hi, decl_local(locals)); @@ -2351,14 +2426,14 @@ pub impl Parser { fn parse_instance_var(pr: visibility) -> @struct_field { let mut is_mutbl = struct_immutable; let lo = self.span.lo; - if self.eat_keyword(~"mut") { + if self.eat_keyword(&~"mut") { is_mutbl = struct_mutable; } - if !is_plain_ident(*self.token) { + if !is_plain_ident(&*self.token) { self.fatal(~"expected ident"); } let name = self.parse_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); @spanned(lo, self.last_span.hi, ast::struct_field_ { kind: named_field(name, is_mutbl, pr), @@ -2370,7 +2445,7 @@ pub impl Parser { fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt { maybe_whole!(self, nt_stmt); - fn check_expected_item(p: Parser, current_attrs: ~[attribute]) { + fn check_expected_item(p: &Parser, current_attrs: &[attribute]) { // If we have attributes then we should have an item if !current_attrs.is_empty() { p.fatal(~"expected item after attrs"); @@ -2378,16 +2453,16 @@ pub impl Parser { } let lo = self.span.lo; - if self.is_keyword(~"let") { - check_expected_item(self, first_item_attrs); - self.expect_keyword(~"let"); + if self.is_keyword(&~"let") { + check_expected_item(&self, first_item_attrs); + self.expect_keyword(&~"let"); let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); - } else if is_ident(*self.token) - && !self.is_any_keyword(*self.token) + } else if is_ident(&*self.token) + && !self.is_any_keyword(© *self.token) && self.look_ahead(1) == token::NOT { - check_expected_item(self, first_item_attrs); + check_expected_item(&self, first_item_attrs); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... @@ -2401,8 +2476,11 @@ pub impl Parser { }; let tts = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, seq_sep_none(), - |p| p.parse_token_tree()); + &token::LPAREN, + &token::RPAREN, + seq_sep_none(), + |p| p.parse_token_tree() + ); let hi = self.span.hi; if id == token::special_idents::invalid { @@ -2423,7 +2501,7 @@ pub impl Parser { let item_attrs = vec::append(first_item_attrs, self.parse_outer_attributes()); - match self.parse_item_or_view_item(item_attrs, + match self.parse_item_or_view_item(/*bad*/ copy item_attrs, true, false, false) { iovi_item(i) => { let mut hi = i.span.hi; @@ -2440,7 +2518,7 @@ pub impl Parser { iovi_none() => { /* fallthrough */ } } - check_expected_item(self, item_attrs); + check_expected_item(&self, item_attrs); // Remainder are line-expr stmts. 
let e = self.parse_expr_res(RESTRICT_STMT_EXPR); @@ -2464,7 +2542,7 @@ pub impl Parser { maybe_whole!(pair_empty self, nt_block); - fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) -> + fn maybe_parse_inner_attrs_and_next(p: &Parser, parse_attrs: bool) -> (~[attribute], ~[attribute]) { if parse_attrs { p.parse_inner_attrs_and_next() @@ -2474,13 +2552,14 @@ pub impl Parser { } let lo = self.span.lo; - if self.eat_keyword(~"unsafe") { - self.obsolete(*self.span, ObsoleteUnsafeBlock); + if self.eat_keyword(&~"unsafe") { + self.obsolete(copy *self.span, ObsoleteUnsafeBlock); } - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let (inner, next) = - maybe_parse_inner_attrs_and_next(self, parse_attrs); - return (inner, self.parse_block_tail_(lo, default_blk, next)); + maybe_parse_inner_attrs_and_next(&self, parse_attrs); + + (inner, self.parse_block_tail_(lo, default_blk, next)) } fn parse_block_no_value() -> blk { @@ -2539,7 +2618,7 @@ pub impl Parser { self.bump(); stmts.push(@codemap::spanned { node: stmt_semi(e, stmt_id), - .. *stmt}); + .. copy *stmt}); } token::RBRACE => { expr = Some(e); @@ -2547,10 +2626,12 @@ pub impl Parser { copy t => { if classify::stmt_ends_with_semi(*stmt) { self.fatal( - ~"expected `;` or `}` after \ - expression but found `" - + token_to_str(self.reader, t) - + ~"`"); + fmt!( + "expected `;` or `}` after \ + expression but found `%s`", + token_to_str(self.reader, &t) + ) + ); } stmts.push(stmt); } @@ -2563,8 +2644,8 @@ pub impl Parser { token::SEMI => { self.bump(); stmts.push(@codemap::spanned { - node: stmt_mac((*m), true), - .. *stmt}); + node: stmt_mac(copy *m, true), + .. copy *stmt}); } token::RBRACE => { // if a block ends in `m!(arg)` without @@ -2572,7 +2653,7 @@ pub impl Parser { expr = Some( self.mk_mac_expr(stmt.span.lo, stmt.span.hi, - (*m).node)); + copy m.node)); } _ => { stmts.push(stmt); } } @@ -2582,7 +2663,7 @@ pub impl Parser { stmts.push(stmt); if classify::stmt_ends_with_semi(*stmt) { - self.expect(token::SEMI); + self.expect(&token::SEMI); } } } @@ -2612,9 +2693,9 @@ pub impl Parser { } fn parse_optional_purity() -> ast::purity { - if self.eat_keyword(~"pure") { + if self.eat_keyword(&~"pure") { ast::pure_fn - } else if self.eat_keyword(~"unsafe") { + } else if self.eat_keyword(&~"unsafe") { ast::unsafe_fn } else { ast::impure_fn @@ -2622,27 +2703,27 @@ pub impl Parser { } fn parse_optional_onceness() -> ast::Onceness { - if self.eat_keyword(~"once") { ast::Once } else { ast::Many } + if self.eat_keyword(&~"once") { ast::Once } else { ast::Many } } fn parse_optional_ty_param_bounds() -> @OptVec { - if !self.eat(token::COLON) { + if !self.eat(&token::COLON) { return @opt_vec::Empty; } let mut result = opt_vec::Empty; loop { - if self.eat(token::BINOP(token::AND)) { - if self.eat_keyword(~"static") { + if self.eat(&token::BINOP(token::AND)) { + if self.eat_keyword(&~"static") { result.push(RegionTyParamBound); } else { self.span_err(*self.span, ~"`&static` is the only permissible \ region bound here"); } - } else if is_ident(*self.token) { + } else if is_ident(&*self.token) { let maybe_bound = match *self.token { - token::IDENT(sid, _) => { + token::IDENT(copy sid, _) => { match *self.id_to_str(sid) { ~"send" | ~"copy" | @@ -2657,7 +2738,6 @@ pub impl Parser { Some(TraitTyParamBound( self.mk_ty_path(sid))) } - _ => None } } @@ -2678,11 +2758,11 @@ pub impl Parser { break; } - if self.eat(token::BINOP(token::PLUS)) { + if self.eat(&token::BINOP(token::PLUS)) { loop; } - if is_ident_or_path(*self.token) { + if 
is_ident_or_path(&*self.token) { self.obsolete(*self.span, ObsoleteTraitBoundSeparator); } @@ -2698,20 +2778,19 @@ pub impl Parser { } fn parse_generics() -> ast::Generics { - if self.eat(token::LT) { + if self.eat(&token::LT) { let lifetimes = self.parse_lifetimes(); let ty_params = self.parse_seq_to_gt( Some(token::COMMA), |p| p.parse_ty_param()); - return ast::Generics {lifetimes: lifetimes, - ty_params: ty_params}; + ast::Generics { lifetimes: lifetimes, ty_params: ty_params } } else { - return ast_util::empty_generics(); + ast_util::empty_generics() } } fn parse_generic_values() -> ~[@Ty] { - if !self.eat(token::LT) { + if !self.eat(&token::LT) { ~[] } else { self.parse_generic_values_after_lt() @@ -2726,13 +2805,16 @@ pub impl Parser { opt_vec::take_vec(result) } - fn parse_fn_decl(parse_arg_fn: fn(Parser) -> arg_or_capture_item) + fn parse_fn_decl(parse_arg_fn: fn(&Parser) -> arg_or_capture_item) -> fn_decl { let args_or_capture_items: ~[arg_or_capture_item] = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, - seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn); + &token::LPAREN, + &token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + parse_arg_fn + ); let inputs = either::lefts(args_or_capture_items); @@ -2754,23 +2836,29 @@ pub impl Parser { fn expect_self_ident() { if !self.is_self_ident() { - self.fatal(fmt!("expected `self` but found `%s`", - token_to_str(self.reader, *self.token))); + self.fatal( + fmt!( + "expected `self` but found `%s`", + token_to_str(self.reader, © *self.token) + ) + ); } self.bump(); } - fn parse_fn_decl_with_self(parse_arg_fn: - fn(Parser) -> arg_or_capture_item) - -> (self_ty, fn_decl) { - - fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_, - p: Parser) -> ast::self_ty_ { + fn parse_fn_decl_with_self( + parse_arg_fn: + fn(&Parser) -> arg_or_capture_item + ) -> (self_ty, fn_decl) { + fn maybe_parse_self_ty( + cnstr: fn(+v: mutability) -> ast::self_ty_, + p: &Parser + ) -> ast::self_ty_ { // We need to make sure it isn't a mode or a type - if p.token_is_keyword(~"self", p.look_ahead(1)) || - ((p.token_is_keyword(~"const", p.look_ahead(1)) || - p.token_is_keyword(~"mut", p.look_ahead(1))) && - p.token_is_keyword(~"self", p.look_ahead(2))) { + if p.token_is_keyword(&~"self", &p.look_ahead(1)) || + ((p.token_is_keyword(&~"const", &p.look_ahead(1)) || + p.token_is_keyword(&~"mut", &p.look_ahead(1))) && + p.token_is_keyword(&~"self", &p.look_ahead(2))) { p.bump(); let mutability = p.parse_mutability(); @@ -2781,20 +2869,20 @@ pub impl Parser { } } - self.expect(token::LPAREN); + self.expect(&token::LPAREN); // A bit of complexity and lookahead is needed here in order to to be // backwards compatible. 
let lo = self.span.lo; let self_ty = match *self.token { token::BINOP(token::AND) => { - maybe_parse_self_ty(sty_region, self) + maybe_parse_self_ty(sty_region, &self) } token::AT => { - maybe_parse_self_ty(sty_box, self) + maybe_parse_self_ty(sty_box, &self) } token::TILDE => { - maybe_parse_self_ty(sty_uniq, self) + maybe_parse_self_ty(sty_uniq, &self) } token::IDENT(*) if self.is_self_ident() => { self.bump(); @@ -2812,29 +2900,34 @@ pub impl Parser { token::COMMA => { self.bump(); let sep = seq_sep_trailing_disallowed(token::COMMA); - args_or_capture_items = - self.parse_seq_to_before_end(token::RPAREN, - sep, - parse_arg_fn); + args_or_capture_items = self.parse_seq_to_before_end( + &token::RPAREN, + sep, + parse_arg_fn + ); } token::RPAREN => { args_or_capture_items = ~[]; } _ => { - self.fatal(~"expected `,` or `)`, found `" + - token_to_str(self.reader, *self.token) + - ~"`"); + self.fatal( + fmt!( + "expected `,` or `)`, found `%s`", + token_to_str(self.reader, © *self.token) + ) + ); } } } else { let sep = seq_sep_trailing_disallowed(token::COMMA); - args_or_capture_items = - self.parse_seq_to_before_end(token::RPAREN, - sep, - parse_arg_fn); + args_or_capture_items = self.parse_seq_to_before_end( + &token::RPAREN, + sep, + parse_arg_fn + ); } - self.expect(token::RPAREN); + self.expect(&token::RPAREN); let hi = self.span.hi; @@ -2852,16 +2945,18 @@ pub impl Parser { fn parse_fn_block_decl() -> fn_decl { let inputs_captures = { - if self.eat(token::OROR) { + if self.eat(&token::OROR) { ~[] } else { self.parse_unspanned_seq( - token::BINOP(token::OR), token::BINOP(token::OR), + &token::BINOP(token::OR), + &token::BINOP(token::OR), seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_fn_block_arg()) + |p| p.parse_fn_block_arg() + ) } }; - let output = if self.eat(token::RARROW) { + let output = if self.eat(&token::RARROW) { self.parse_ty(false) } else { @Ty { id: self.get_id(), node: ty_infer, span: *self.span } @@ -2920,6 +3015,7 @@ pub impl Parser { let self_ty = if is_static { static_sty} else { self_ty }; let (inner_attrs, body) = self.parse_inner_attrs_and_block(true); + let hi = body.span.hi; let attrs = vec::append(attrs, inner_attrs); @ast::method { ident: ident, @@ -2930,7 +3026,7 @@ pub impl Parser { decl: decl, body: body, id: self.get_id(), - span: mk_sp(lo, body.span.hi), + span: mk_sp(lo, hi), self_id: self.get_id(), vis: visa, } @@ -2945,7 +3041,7 @@ pub impl Parser { let traits; if *self.token == token::COLON { self.bump(); - traits = self.parse_trait_ref_list(token::LBRACE); + traits = self.parse_trait_ref_list(&token::LBRACE); } else { traits = ~[]; } @@ -2958,7 +3054,7 @@ pub impl Parser { // impl Foo { ... } // impl ToStr for ~[T] { ... } fn parse_item_impl(visibility: ast::visibility) -> item_info { - fn wrap_path(p: Parser, pt: @path) -> @Ty { + fn wrap_path(p: &Parser, pt: @path) -> @Ty { @Ty { id: p.get_id(), node: ty_path(pt, p.get_id()), @@ -2977,7 +3073,7 @@ pub impl Parser { let mut ty = self.parse_ty(false); // Parse traits, if necessary. - let opt_trait = if self.eat_keyword(~"for") { + let opt_trait = if self.eat_keyword(&~"for") { // New-style trait. Reinterpret the type as a trait. 
let opt_trait_ref = match ty.node { ty_path(path, node_id) => { @@ -2994,8 +3090,8 @@ pub impl Parser { ty = self.parse_ty(false); opt_trait_ref - } else if self.eat(token::COLON) { - self.obsolete(*self.span, ObsoleteImplSyntax); + } else if self.eat(&token::COLON) { + self.obsolete(copy *self.span, ObsoleteImplSyntax); Some(self.parse_trait_ref()) } else { None @@ -3008,9 +3104,9 @@ pub impl Parser { } let mut meths = ~[]; - if !self.eat(token::SEMI) { - self.expect(token::LBRACE); - while !self.eat(token::RBRACE) { + if !self.eat(&token::SEMI) { + self.expect(&token::LBRACE); + while !self.eat(&token::RBRACE) { meths.push(self.parse_method()); } } @@ -3025,26 +3121,28 @@ pub impl Parser { } } - fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] { + fn parse_trait_ref_list(ket: &token::Token) -> ~[@trait_ref] { self.parse_seq_to_before_end( - ket, seq_sep_none(), - |p| p.parse_trait_ref()) + ket, + seq_sep_none(), + |p| p.parse_trait_ref() + ) } fn parse_item_struct() -> item_info { let class_name = self.parse_value_ident(); self.parse_region_param(); let generics = self.parse_generics(); - if self.eat(token::COLON) { - self.obsolete(*self.span, ObsoleteClassTraits); - let _ = self.parse_trait_ref_list(token::LBRACE); + if self.eat(&token::COLON) { + self.obsolete(copy *self.span, ObsoleteClassTraits); + let _ = self.parse_trait_ref_list(&token::LBRACE); } let mut fields: ~[@struct_field]; let mut the_dtor: Option<(blk, ~[attribute], codemap::span)> = None; let is_tuple_like; - if self.eat(token::LBRACE) { + if self.eat(&token::LBRACE) { // It's a record-like struct. is_tuple_like = false; fields = ~[]; @@ -3060,7 +3158,7 @@ pub impl Parser { declared here"); } None => { - the_dtor = Some(((*blk), (*attrs), s)); + the_dtor = Some((copy *blk, copy *attrs, s)); } } } @@ -3075,9 +3173,11 @@ pub impl Parser { } else if *self.token == token::LPAREN { // It's a tuple-like struct. is_tuple_like = true; - fields = do self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep_trailing_allowed - (token::COMMA)) |p| { + fields = do self.parse_unspanned_seq( + &token::LPAREN, + &token::RPAREN, + seq_sep_trailing_allowed(token::COMMA) + ) |p| { let lo = p.span.lo; let struct_field_ = ast::struct_field_ { kind: unnamed_field, @@ -3086,19 +3186,23 @@ pub impl Parser { }; @spanned(lo, p.span.hi, struct_field_) }; - self.expect(token::SEMI); - } else if self.eat(token::SEMI) { + self.expect(&token::SEMI); + } else if self.eat(&token::SEMI) { // It's a unit-like struct. 
is_tuple_like = true; fields = ~[]; } else { - self.fatal(fmt!("expected `{`, `(`, or `;` after struct name \ - but found `%s`", - token_to_str(self.reader, *self.token))); + self.fatal( + fmt!( + "expected `{`, `(`, or `;` after struct name \ + but found `%s`", + token_to_str(self.reader, © *self.token) + ) + ); } let actual_dtor = do the_dtor.map |dtor| { - let (d_body, d_attrs, d_s) = *dtor; + let (d_body, d_attrs, d_s) = copy *dtor; codemap::spanned { node: ast::struct_dtor_ { id: self.get_id(), attrs: d_attrs, self_id: self.get_id(), @@ -3129,26 +3233,28 @@ pub impl Parser { let a_var = self.parse_instance_var(vis); match *self.token { - token::SEMI => { - self.obsolete(*self.span, ObsoleteFieldTerminator); - self.bump(); - } - token::COMMA => { - self.bump(); - } - token::RBRACE => {} - _ => { - self.span_fatal(*self.span, - fmt!("expected `;`, `,`, or '}' but \ - found `%s`", - token_to_str(self.reader, - *self.token))); - } + token::SEMI => { + self.obsolete(copy *self.span, ObsoleteFieldTerminator); + self.bump(); + } + token::COMMA => { + self.bump(); + } + token::RBRACE => {} + _ => { + self.span_fatal( + copy *self.span, + fmt!( + "expected `;`, `,`, or '}' but found `%s`", + token_to_str(self.reader, © *self.token) + ) + ); + } } a_var } - fn parse_dtor(attrs: ~[attribute]) -> class_contents { + fn parse_dtor(+attrs: ~[attribute]) -> class_contents { let lo = self.last_span.lo; let body = self.parse_block(); dtor_decl(body, attrs, mk_sp(lo, self.last_span.hi)) @@ -3162,11 +3268,11 @@ pub impl Parser { let attrs = self.parse_outer_attributes(); - if self.eat_keyword(~"priv") { + if self.eat_keyword(&~"priv") { return members(~[self.parse_single_class_item(private)]) } - if self.eat_keyword(~"pub") { + if self.eat_keyword(&~"pub") { return members(~[self.parse_single_class_item(public)]); } @@ -3174,7 +3280,7 @@ pub impl Parser { return members(~[]); } - if self.eat_keyword(~"drop") { + if self.eat_keyword(&~"drop") { return self.parse_dtor(attrs); } else { @@ -3183,12 +3289,12 @@ pub impl Parser { } fn parse_visibility() -> visibility { - if self.eat_keyword(~"pub") { public } - else if self.eat_keyword(~"priv") { private } + if self.eat_keyword(&~"pub") { public } + else if self.eat_keyword(&~"priv") { private } else { inherited } } fn parse_staticness() -> bool { - self.eat_keyword(~"static") + self.eat_keyword(&~"static") } // given a termination token and a vector of already-parsed @@ -3214,12 +3320,17 @@ pub impl Parser { while *self.token != term { let mut attrs = self.parse_outer_attributes(); if first { - attrs = vec::append(attrs_remaining, attrs); + attrs = vec::append(/*bad*/ copy attrs_remaining, attrs); first = false; } debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)", attrs); - match self.parse_item_or_view_item(attrs, true, false, true) { + match self.parse_item_or_view_item( + /*bad*/ copy attrs, + true, + false, + true + ) { iovi_item(item) => items.push(item), iovi_view_item(view_item) => { self.span_fatal(view_item.span, ~"view items must be \ @@ -3227,8 +3338,12 @@ pub impl Parser { module"); } _ => { - self.fatal(~"expected item but found `" + - token_to_str(self.reader, *self.token) + ~"`"); + self.fatal( + fmt!( + "expected item but found `%s`", + token_to_str(self.reader, © *self.token) + ) + ); } } debug!("parse_mod_items: attrs=%?", attrs); @@ -3244,11 +3359,11 @@ pub impl Parser { fn parse_item_const() -> item_info { let id = self.parse_value_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); 
- self.expect(token::EQ); + self.expect(&token::EQ); let e = self.parse_expr(); - self.expect(token::SEMI); + self.expect(&token::SEMI); (id, item_const(ty, e), None) } @@ -3262,10 +3377,10 @@ pub impl Parser { (id, m, Some(attrs)) } else { self.push_mod_path(id, outer_attrs); - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let (inner, next) = self.parse_inner_attrs_and_next(); let m = self.parse_mod_items(token::RBRACE, next); - self.expect(token::RBRACE); + self.expect(&token::RBRACE); self.pop_mod_path(); (id, item_mod(m), Some(inner)) }; @@ -3343,7 +3458,7 @@ pub impl Parser { outer_attrs, id_sp) } - fn eval_src_mod_from_path(prefix: Path, path: Path, + fn eval_src_mod_from_path(prefix: Path, +path: Path, outer_attrs: ~[ast::attribute], id_sp: span ) -> (ast::item_, ~[ast::attribute]) { @@ -3355,15 +3470,18 @@ pub impl Parser { }; let full_path = full_path.normalize(); let p0 = - new_sub_parser_from_file(self.sess, self.cfg, + new_sub_parser_from_file(self.sess, copy self.cfg, &full_path, id_sp); let (inner, next) = p0.parse_inner_attrs_and_next(); - let mod_attrs = vec::append(outer_attrs, inner); + let mod_attrs = vec::append( + /*bad*/ copy outer_attrs, + inner + ); let first_item_outer_attrs = next; let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); return (ast::item_mod(m0), mod_attrs); - fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str { + fn cdir_path_opt(+default: ~str, attrs: ~[ast::attribute]) -> ~str { match ::attr::first_attr_value_str_by_name(attrs, ~"path") { Some(d) => copy *d, None => default @@ -3378,7 +3496,7 @@ pub impl Parser { let (ident, generics) = self.parse_fn_header(); let decl = self.parse_fn_decl(|p| p.parse_arg()); let mut hi = self.span.hi; - self.expect(token::SEMI); + self.expect(&token::SEMI); @ast::foreign_item { ident: ident, attrs: attrs, node: foreign_item_fn(decl, purity, generics), @@ -3390,12 +3508,12 @@ pub impl Parser { fn parse_item_foreign_const(vis: ast::visibility, +attrs: ~[attribute]) -> @foreign_item { let lo = self.span.lo; - self.expect_keyword(~"const"); + self.expect_keyword(&~"const"); let ident = self.parse_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); let hi = self.span.hi; - self.expect(token::SEMI); + self.expect(&token::SEMI); @ast::foreign_item { ident: ident, attrs: attrs, node: foreign_item_const(ty), @@ -3405,12 +3523,12 @@ pub impl Parser { } fn parse_fn_purity() -> purity { - if self.eat_keyword(~"fn") { impure_fn } - else if self.eat_keyword(~"pure") { - self.expect_keyword(~"fn"); + if self.eat_keyword(&~"fn") { impure_fn } + else if self.eat_keyword(&~"pure") { + self.expect_keyword(&~"fn"); pure_fn - } else if self.eat_keyword(~"unsafe") { - self.expect_keyword(~"fn"); + } else if self.eat_keyword(&~"unsafe") { + self.expect_keyword(&~"fn"); unsafe_fn } else { self.unexpected(); } @@ -3418,7 +3536,7 @@ pub impl Parser { fn parse_foreign_item(+attrs: ~[attribute]) -> @foreign_item { let vis = self.parse_visibility(); - if self.is_keyword(~"const") { + if self.is_keyword(&~"const") { self.parse_item_foreign_const(vis, attrs) } else { self.parse_item_foreign_fn(attrs) @@ -3474,24 +3592,30 @@ pub impl Parser { } let mut must_be_named_mod = false; - if self.is_keyword(~"mod") { + if self.is_keyword(&~"mod") { must_be_named_mod = true; - self.expect_keyword(~"mod"); + self.expect_keyword(&~"mod"); } else if *self.token != token::LBRACE { - self.span_fatal(*self.span, - fmt!("expected `{` or `mod` but found %s", - 
token_to_str(self.reader, *self.token))); + self.span_fatal( + copy *self.span, + fmt!( + "expected `{` or `mod` but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } let (sort, ident) = match *self.token { token::IDENT(*) => (ast::named, self.parse_ident()), _ => { if must_be_named_mod { - self.span_fatal(*self.span, - fmt!("expected foreign module name but \ - found %s", - token_to_str(self.reader, - *self.token))); + self.span_fatal( + copy *self.span, + fmt!( + "expected foreign module name but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } (ast::anonymous, @@ -3500,7 +3624,7 @@ pub impl Parser { }; // extern mod { ... } - if items_allowed && self.eat(token::LBRACE) { + if items_allowed && self.eat(&token::LBRACE) { let abi; match abi_opt { Some(found_abi) => abi = found_abi, @@ -3509,11 +3633,12 @@ let (inner, next) = self.parse_inner_attrs_and_next(); let m = self.parse_foreign_mod_items(sort, abi, next); - self.expect(token::RBRACE); + self.expect(&token::RBRACE); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, - item_foreign_mod(m), visibility, - maybe_append(attrs, Some(inner)))); + item_foreign_mod(m), visibility, + maybe_append(/*bad*/ copy attrs, + Some(inner)))); } match abi_opt { @@ -3526,10 +3651,10 @@ pub impl Parser { // extern mod foo; let metadata = self.parse_optional_meta(); - self.expect(token::SEMI); + self.expect(&token::SEMI); iovi_view_item(@ast::view_item { node: view_item_extern_mod(ident, metadata, self.get_id()), - attrs: attrs, + attrs: copy attrs, vis: visibility, span: mk_sp(lo, self.last_span.hi) }) @@ -3545,15 +3670,15 @@ pub impl Parser { let (_, ident) = self.parse_type_decl(); self.parse_region_param(); let tps = self.parse_generics(); - self.expect(token::EQ); + self.expect(&token::EQ); let ty = self.parse_ty(false); - self.expect(token::SEMI); + self.expect(&token::SEMI); (ident, item_ty(ty, tps), None) } fn parse_region_param() { - if self.eat(token::BINOP(token::SLASH)) { - self.expect(token::BINOP(token::AND)); + if self.eat(&token::BINOP(token::SLASH)) { + self.expect(&token::BINOP(token::AND)); } } @@ -3572,7 +3697,7 @@ pub impl Parser { declared here"); } None => { - the_dtor = Some(((*blk), (*attrs), s)); + the_dtor = Some((copy *blk, copy *attrs, s)); } } } @@ -3585,7 +3710,7 @@ pub impl Parser { } self.bump(); let mut actual_dtor = do the_dtor.map |dtor| { - let (d_body, d_attrs, d_s) = *dtor; + let (d_body, d_attrs, d_s) = copy *dtor; codemap::spanned { node: ast::struct_dtor_ { id: self.get_id(), attrs: d_attrs, self_id: self.get_id(), @@ -3600,8 +3725,8 @@ pub impl Parser { }; } - fn parse_enum_def(+generics: ast::Generics) -> enum_def { - let mut variants: ~[variant] = ~[]; + fn parse_enum_def(generics: &ast::Generics) -> enum_def { + let mut variants = ~[]; let mut all_nullary = true, have_disr = false; let mut common_fields = None; @@ -3610,11 +3735,11 @@ pub impl Parser { let vlo = self.span.lo; // Is this a common field declaration? - if self.eat_keyword(~"struct") { + if self.eat_keyword(&~"struct") { if common_fields.is_some() { self.fatal(~"duplicate declaration of shared fields"); } - self.expect(token::LBRACE); + self.expect(&token::LBRACE); common_fields = Some(self.parse_struct_def()); loop; } @@ -3624,24 +3749,26 @@ pub impl Parser { // Is this a nested enum declaration?
let ident, needs_comma, kind; let mut args = ~[], disr_expr = None; - if self.eat_keyword(~"enum") { + if self.eat_keyword(&~"enum") { ident = self.parse_ident(); - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let nested_enum_def = self.parse_enum_def(generics); kind = enum_variant_kind(nested_enum_def); needs_comma = false; } else { ident = self.parse_value_ident(); - if self.eat(token::LBRACE) { + if self.eat(&token::LBRACE) { // Parse a struct variant. all_nullary = false; kind = struct_variant_kind(self.parse_struct_def()); } else if *self.token == token::LPAREN { all_nullary = false; let arg_tys = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_ty(false)); + |p| p.parse_ty(false) + ); for arg_tys.each |ty| { args.push(ast::variant_arg { ty: *ty, @@ -3649,7 +3776,7 @@ pub impl Parser { }); } kind = tuple_variant_kind(args); - } else if self.eat(token::EQ) { + } else if self.eat(&token::EQ) { have_disr = true; disr_expr = Some(self.parse_expr()); kind = tuple_variant_kind(args); @@ -3669,9 +3796,9 @@ pub impl Parser { }; variants.push(spanned(vlo, self.last_span.hi, vr)); - if needs_comma && !self.eat(token::COMMA) { break; } + if needs_comma && !self.eat(&token::COMMA) { break; } } - self.expect(token::RBRACE); + self.expect(&token::RBRACE); if (have_disr && !all_nullary) { self.fatal(~"discriminator values can only be used with a c-like \ enum"); @@ -3688,7 +3815,7 @@ pub impl Parser { if *self.token == token::EQ { self.bump(); let ty = self.parse_ty(false); - self.expect(token::SEMI); + self.expect(&token::SEMI); let variant = spanned(ty.span.lo, ty.span.hi, ast::variant_ { name: id, attrs: ~[], @@ -3710,9 +3837,9 @@ pub impl Parser { None ); } - self.expect(token::LBRACE); + self.expect(&token::LBRACE); - let enum_definition = self.parse_enum_def(generics); + let enum_definition = self.parse_enum_def(&generics); (id, item_enum(enum_definition, generics), None) } @@ -3755,57 +3882,57 @@ pub impl Parser { let lo = self.span.lo; let visibility; - if self.eat_keyword(~"pub") { + if self.eat_keyword(&~"pub") { visibility = public; - } else if self.eat_keyword(~"priv") { + } else if self.eat_keyword(&~"priv") { visibility = private; } else { visibility = inherited; } - if items_allowed && self.eat_keyword(~"const") { + if items_allowed && self.eat_keyword(&~"const") { // CONST ITEM let (ident, item_, extra_attrs) = self.parse_item_const(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if foreign_items_allowed && self.is_keyword(~"const") { + } else if foreign_items_allowed && self.is_keyword(&~"const") { // FOREIGN CONST ITEM let item = self.parse_item_foreign_const(visibility, attrs); return iovi_foreign_item(item); } else if items_allowed && // FUNCTION ITEM (not sure about lookahead condition...) 
- self.is_keyword(~"fn") && + self.is_keyword(&~"fn") && !self.fn_expr_lookahead(self.look_ahead(1u)) { self.bump(); let (ident, item_, extra_attrs) = self.parse_item_fn(impure_fn); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"pure") { + } else if items_allowed && self.eat_keyword(&~"pure") { // PURE FUNCTION ITEM - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); let (ident, item_, extra_attrs) = self.parse_item_fn(pure_fn); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); } else if foreign_items_allowed && - (self.is_keyword(~"fn") || self.is_keyword(~"pure") || - self.is_keyword(~"unsafe")) { + (self.is_keyword(&~"fn") || self.is_keyword(&~"pure") || + self.is_keyword(&~"unsafe")) { // FOREIGN FUNCTION ITEM (no items allowed) let item = self.parse_item_foreign_fn(attrs); return iovi_foreign_item(item); - } else if items_allowed && self.is_keyword(~"unsafe") + } else if items_allowed && self.is_keyword(&~"unsafe") && self.look_ahead(1u) != token::LBRACE { // UNSAFE FUNCTION ITEM (where items are allowed) self.bump(); - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); let (ident, item_, extra_attrs) = self.parse_item_fn(unsafe_fn); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if self.eat_keyword(~"extern") { - if items_allowed && self.eat_keyword(~"fn") { + } else if self.eat_keyword(&~"extern") { + if items_allowed && self.eat_keyword(&~"fn") { // EXTERN FUNCTION ITEM let (ident, item_, extra_attrs) = self.parse_item_fn(extern_fn); @@ -3817,56 +3944,56 @@ pub impl Parser { // EXTERN MODULE ITEM return self.parse_item_foreign_mod(lo, visibility, attrs, items_allowed); - } else if items_allowed && self.eat_keyword(~"mod") { + } else if items_allowed && self.eat_keyword(&~"mod") { // MODULE ITEM let (ident, item_, extra_attrs) = self.parse_item_mod(attrs); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"type") { + } else if items_allowed && self.eat_keyword(&~"type") { // TYPE ITEM let (ident, item_, extra_attrs) = self.parse_item_type(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"enum") { + } else if items_allowed && self.eat_keyword(&~"enum") { // ENUM ITEM let (ident, item_, extra_attrs) = self.parse_item_enum(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"trait") { + } else if items_allowed && self.eat_keyword(&~"trait") { // TRAIT ITEM let (ident, item_, extra_attrs) = self.parse_item_trait(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"impl") { + } else if items_allowed && self.eat_keyword(&~"impl") { // IMPL ITEM let (ident, item_, extra_attrs) = self.parse_item_impl(visibility); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"struct") { + } else if items_allowed && self.eat_keyword(&~"struct") { // STRUCT ITEM let (ident, item_, 
extra_attrs) = self.parse_item_struct(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if self.eat_keyword(~"use") { + } else if self.eat_keyword(&~"use") { // USE ITEM let view_item = self.parse_use(); - self.expect(token::SEMI); + self.expect(&token::SEMI); return iovi_view_item(@ast::view_item { node: view_item, attrs: attrs, vis: visibility, span: mk_sp(lo, self.last_span.hi) }); - } else if macros_allowed && !self.is_any_keyword(*self.token) + } else if macros_allowed && !self.is_any_keyword(copy *self.token) && self.look_ahead(1) == token::NOT - && (is_plain_ident(self.look_ahead(2)) + && (is_plain_ident(&self.look_ahead(2)) || self.look_ahead(2) == token::LPAREN || self.look_ahead(2) == token::LBRACE) { // MACRO INVOCATION ITEM @@ -3876,25 +4003,28 @@ pub impl Parser { // item macro. let pth = self.parse_path_without_tps(); - self.expect(token::NOT); + self.expect(&token::NOT); // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax // and remove this. - let id = if is_plain_ident(*self.token) { + let id = if is_plain_ident(&*self.token) { self.parse_ident() } else { token::special_idents::invalid // no special identifier }; // eat a matched-delimiter token tree: let tts = match *self.token { - token::LPAREN | token::LBRACE => { - let ket = token::flip_delimiter(*self.token); - self.parse_unspanned_seq(*self.token, ket, - seq_sep_none(), - |p| p.parse_token_tree()) - } - _ => self.fatal(~"expected open delimiter") + token::LPAREN | token::LBRACE => { + let ket = token::flip_delimiter(&*self.token); + self.parse_unspanned_seq( + copy *self.token, + &ket, + seq_sep_none(), + |p| p.parse_token_tree() + ) + } + _ => self.fatal(~"expected open delimiter") }; // single-variant-enum... : let m = ast::mac_invoc_tt(pth, tts); @@ -3937,7 +4067,7 @@ pub impl Parser { let lo = self.span.lo; let namespace; - if self.eat_keyword(~"mod") { + if self.eat_keyword(&~"mod") { namespace = module_ns; } else { namespace = type_value_ns; @@ -3980,9 +4110,11 @@ pub impl Parser { // foo::bar::{a,b,c} token::LBRACE => { let idents = self.parse_unspanned_seq( - token::LBRACE, token::RBRACE, + &token::LBRACE, + &token::RBRACE, seq_sep_trailing_allowed(token::COMMA), - |p| p.parse_path_list_ident()); + |p| p.parse_path_list_ident() + ); let path = @ast::path { span: mk_sp(lo, self.span.hi), global: false, idents: path, @@ -4031,32 +4163,32 @@ pub impl Parser { fn is_view_item() -> bool { let tok, next_tok; - if !self.is_keyword(~"pub") && !self.is_keyword(~"priv") { - tok = *self.token; + if !self.is_keyword(&~"pub") && !self.is_keyword(&~"priv") { + tok = copy *self.token; next_tok = self.look_ahead(1); } else { tok = self.look_ahead(1); next_tok = self.look_ahead(2); }; - self.token_is_keyword(~"use", tok) - || (self.token_is_keyword(~"extern", tok) && - self.token_is_keyword(~"mod", next_tok)) + self.token_is_keyword(&~"use", &tok) + || (self.token_is_keyword(&~"extern", &tok) && + self.token_is_keyword(&~"mod", &next_tok)) } // parse a view item.
fn parse_view_item(+attrs: ~[attribute], vis: visibility) -> @view_item { let lo = self.span.lo; - let node = if self.eat_keyword(~"use") { + let node = if self.eat_keyword(&~"use") { self.parse_use() - } else if self.eat_keyword(~"extern") { - self.expect_keyword(~"mod"); + } else if self.eat_keyword(&~"extern") { + self.expect_keyword(&~"mod"); let ident = self.parse_ident(); let metadata = self.parse_optional_meta(); view_item_extern_mod(ident, metadata, self.get_id()) } else { self.bug(~"expected view item"); }; - self.expect(token::SEMI); + self.expect(&token::SEMI); @ast::view_item { node: node, attrs: attrs, vis: vis, @@ -4090,7 +4222,8 @@ pub impl Parser { let mut (view_items, items, foreign_items) = (~[], ~[], ~[]); loop { - match self.parse_item_or_view_item(attrs, items_allowed, + match self.parse_item_or_view_item(/*bad*/ copy attrs, + items_allowed, foreign_items_allowed, macros_allowed) { iovi_none => @@ -4139,7 +4272,7 @@ pub impl Parser { @spanned(lo, self.span.lo, ast::crate_ { module: m, attrs: inner, - config: self.cfg }) + config: copy self.cfg }) } fn parse_str() -> @~str { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 207f6d499159f..6d0ca2c665701 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -117,7 +117,7 @@ pub enum nonterminal { nt_matchers(~[ast::matcher]) } -pub fn binop_to_str(o: binop) -> ~str { +pub pure fn binop_to_str(o: binop) -> ~str { match o { PLUS => ~"+", MINUS => ~"-", @@ -132,8 +132,8 @@ pub fn binop_to_str(o: binop) -> ~str { } } -pub fn to_str(in: @ident_interner, t: Token) -> ~str { - match t { +pub fn to_str(in: @ident_interner, t: &Token) -> ~str { + match *t { EQ => ~"=", LT => ~"<", LE => ~"<=", @@ -181,14 +181,14 @@ pub fn to_str(in: @ident_interner, t: Token) -> ~str { } LIT_INT_UNSUFFIXED(i) => { i.to_str() } LIT_FLOAT(s, t) => { - let mut body = *in.get(s); + let mut body = copy *in.get(s); if body.ends_with(~".") { body = body + ~"0"; // `10.f` is not a float literal } body + ast_util::float_ty_to_str(t) } LIT_FLOAT_UNSUFFIXED(s) => { - let mut body = *in.get(s); + let mut body = copy *in.get(s); if body.ends_with(~".") { body = body + ~"0"; // `10.f` is not a float literal } @@ -197,12 +197,12 @@ pub fn to_str(in: @ident_interner, t: Token) -> ~str { LIT_STR(s) => { ~"\"" + str::escape_default(*in.get(s)) + ~"\"" } /* Name components */ - IDENT(s, _) => *in.get(s), + IDENT(s, _) => copy *in.get(s), LIFETIME(s) => fmt!("'%s", *in.get(s)), UNDERSCORE => ~"_", /* Other */ - DOC_COMMENT(s) => *in.get(s), + DOC_COMMENT(s) => copy *in.get(s), EOF => ~"", INTERPOLATED(ref nt) => { match nt { @@ -227,8 +227,8 @@ pub fn to_str(in: @ident_interner, t: Token) -> ~str { } } -pub pure fn can_begin_expr(t: Token) -> bool { - match t { +pub pure fn can_begin_expr(t: &Token) -> bool { + match *t { LPAREN => true, LBRACE => true, LBRACKET => true, @@ -259,22 +259,22 @@ pub pure fn can_begin_expr(t: Token) -> bool { } /// what's the opposite delimiter? 
-pub fn flip_delimiter(t: token::Token) -> token::Token { - match t { - token::LPAREN => token::RPAREN, - token::LBRACE => token::RBRACE, - token::LBRACKET => token::RBRACKET, - token::RPAREN => token::LPAREN, - token::RBRACE => token::LBRACE, - token::RBRACKET => token::LBRACKET, +pub fn flip_delimiter(t: &token::Token) -> token::Token { + match *t { + LPAREN => RPAREN, + LBRACE => RBRACE, + LBRACKET => RBRACKET, + RPAREN => LPAREN, + RBRACE => LBRACE, + RBRACKET => LBRACKET, _ => fail!() } } -pub fn is_lit(t: Token) -> bool { - match t { +pub fn is_lit(t: &Token) -> bool { + match *t { LIT_INT(_, _) => true, LIT_UINT(_, _) => true, LIT_INT_UNSUFFIXED(_) => true, @@ -285,23 +285,23 @@ pub fn is_lit(t: Token) -> bool { } } -pub pure fn is_ident(t: Token) -> bool { - match t { IDENT(_, _) => true, _ => false } +pub pure fn is_ident(t: &Token) -> bool { + match *t { IDENT(_, _) => true, _ => false } } -pub pure fn is_ident_or_path(t: Token) -> bool { - match t { +pub pure fn is_ident_or_path(t: &Token) -> bool { + match *t { IDENT(_, _) | INTERPOLATED(nt_path(*)) => true, _ => false } } -pub pure fn is_plain_ident(t: Token) -> bool { - match t { IDENT(_, false) => true, _ => false } +pub pure fn is_plain_ident(t: &Token) -> bool { + match *t { IDENT(_, false) => true, _ => false } } -pub pure fn is_bar(t: Token) -> bool { - match t { BINOP(OR) | OROR => true, _ => false } +pub pure fn is_bar(t: &Token) -> bool { + match *t { BINOP(OR) | OROR => true, _ => false } } @@ -478,7 +478,7 @@ pub fn temporary_keyword_table() -> HashMap<~str, ()> { ~"self", ~"static", ]; for keys.each |word| { - words.insert(*word, ()); + words.insert(copy *word, ()); } words } @@ -505,7 +505,7 @@ pub fn strict_keyword_table() -> HashMap<~str, ()> { ~"while" ]; for keys.each |word| { - words.insert(*word, ()); + words.insert(copy *word, ()); } words } @@ -516,7 +516,7 @@ pub fn reserved_keyword_table() -> HashMap<~str, ()> { ~"be" ]; for keys.each |word| { - words.insert(*word, ()); + words.insert(copy *word, ()); } words } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 402c7c2663465..1dc45b1a4bd9a 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -568,15 +568,15 @@ pub fn end(p: @mut Printer) { p.pretty_print(END); } pub fn eof(p: @mut Printer) { p.pretty_print(EOF); } pub fn word(p: @mut Printer, wrd: ~str) { - p.pretty_print(STRING(@wrd, str::len(wrd) as int)); + p.pretty_print(STRING(@/*bad*/ copy wrd, wrd.len() as int)); } pub fn huge_word(p: @mut Printer, wrd: ~str) { - p.pretty_print(STRING(@wrd, size_infinity)); + p.pretty_print(STRING(@/*bad*/ copy wrd, size_infinity)); } pub fn zero_word(p: @mut Printer, wrd: ~str) { - p.pretty_print(STRING(@wrd, 0)); + p.pretty_print(STRING(@/*bad*/ copy wrd, 0)); } pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 2ce0f987a3b58..282980f0faaea 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -40,8 +40,8 @@ use core::u64; use core::vec; // The @ps is stored here to prevent recursive type. 
-pub enum ann_node { - node_block(@ps, ast::blk), +pub enum ann_node/& { + node_block(@ps, &ast::blk), node_item(@ps, @ast::item), node_expr(@ps, @ast::expr), node_pat(@ps, @ast::pat), @@ -110,18 +110,20 @@ pub fn print_crate(cm: @CodeMap, intr: @ident_interner, span_diagnostic: diagnostic::span_handler, crate: @ast::crate, filename: ~str, in: io::Reader, out: io::Writer, ann: pp_ann, is_expanded: bool) { - let (cmnts, lits) = - comments::gather_comments_and_literals(span_diagnostic, - filename, in); + let (cmnts, lits) = comments::gather_comments_and_literals( + span_diagnostic, + copy filename, + in + ); let s = @ps { s: pp::mk_printer(out, default_columns), cm: Some(cm), intr: intr, - comments: Some(cmnts), + comments: Some(copy cmnts), // If the code is post expansion, don't use the table of // literals, since it doesn't correspond with the literals // in the AST anymore. - literals: if is_expanded { None } else { Some(lits) }, + literals: if is_expanded { None } else { Some(copy lits) }, cur_cmnt_and_lit: @mut CurrentCommentAndLiteral { cur_cmnt: 0, cur_lit: 0 @@ -175,7 +177,7 @@ pub fn path_to_str(&&p: @ast::path, intr: @ident_interner) -> ~str { to_str(p, |a,b| print_path(a, b, false), intr) } -pub fn fun_to_str(decl: ast::fn_decl, name: ast::ident, +pub fn fun_to_str(decl: &ast::fn_decl, name: ast::ident, generics: &ast::Generics, intr: @ident_interner) -> ~str { do io::with_str_writer |wr| { let s = rust_printer(wr, intr); @@ -186,7 +188,7 @@ pub fn fun_to_str(decl: ast::fn_decl, name: ast::ident, } } -pub fn block_to_str(blk: ast::blk, intr: @ident_interner) -> ~str { +pub fn block_to_str(blk: &ast::blk, intr: @ident_interner) -> ~str { do io::with_str_writer |wr| { let s = rust_printer(wr, intr); // containing cbox, will be closed by print-block at } @@ -381,7 +383,7 @@ pub fn print_type(s: @ps, &&ty: @ast::Ty) { pub fn print_type_ex(s: @ps, &&ty: @ast::Ty, print_colons: bool) { maybe_print_comment(s, ty.span.lo); ibox(s, 0u); - match ty.node { + match /*bad*/ copy ty.node { ast::ty_nil => word(s.s, ~"()"), ast::ty_bot => word(s.s, ~"!"), ast::ty_box(mt) => { word(s.s, ~"@"); print_mt(s, mt); } @@ -425,12 +427,12 @@ pub fn print_type_ex(s: @ps, &&ty: @ast::Ty, print_colons: bool) { } ast::ty_bare_fn(f) => { print_ty_fn(s, Some(f.abi), None, None, - f.purity, ast::Many, f.decl, None, + f.purity, ast::Many, &f.decl, None, None, None); } ast::ty_closure(f) => { print_ty_fn(s, None, Some(f.sigil), f.region, - f.purity, f.onceness, f.decl, None, + f.purity, f.onceness, &f.decl, None, None, None); } ast::ty_path(path, _) => print_path(s, path, print_colons), @@ -461,8 +463,8 @@ pub fn print_foreign_item(s: @ps, item: @ast::foreign_item) { hardbreak_if_not_bol(s); maybe_print_comment(s, item.span.lo); print_outer_attributes(s, item.attrs); - match item.node { - ast::foreign_item_fn(decl, purity, ref generics) => { + match /*bad*/ copy item.node { + ast::foreign_item_fn(ref decl, purity, ref generics) => { print_fn(s, decl, Some(purity), item.ident, generics, None, ast::inherited); end(s); // end head-ibox @@ -487,7 +489,7 @@ pub fn print_item(s: @ps, &&item: @ast::item) { print_outer_attributes(s, item.attrs); let ann_node = node_item(s, item); (s.ann.pre)(ann_node); - match item.node { + match /*bad*/ copy item.node { ast::item_const(ty, expr) => { head(s, visibility_qualified(item.vis, ~"const")); print_ident(s, item.ident); @@ -505,7 +507,7 @@ pub fn print_item(s: @ps, &&item: @ast::item) { ast::item_fn(ref decl, purity, ref typarams, ref body) => { print_fn( s, - /* FIXME (#2543) 
*/ copy *decl, + decl, Some(purity), item.ident, typarams, @@ -513,7 +515,7 @@ pub fn print_item(s: @ps, &&item: @ast::item) { item.vis ); word(s.s, ~" "); - print_block_with_attrs(s, (*body), item.attrs); + print_block_with_attrs(s, body, item.attrs); } ast::item_mod(_mod) => { head(s, visibility_qualified(item.vis, ~"mod")); @@ -568,7 +570,7 @@ pub fn print_item(s: @ps, &&item: @ast::item) { print_struct(s, struct_def, generics, item.ident, item.span); } - ast::item_impl(ref generics, opt_trait, ty, methods) => { + ast::item_impl(ref generics, opt_trait, ty, ref methods) => { head(s, visibility_qualified(item.vis, ~"impl")); if !generics.is_empty() { print_generics(s, generics); @@ -610,8 +612,8 @@ pub fn print_item(s: @ps, &&item: @ast::item) { } word(s.s, ~" "); bopen(s); - for (*methods).each |meth| { - print_trait_method(s, *meth); + for methods.each |meth| { + print_trait_method(s, meth); } bclose(s, item.span); } @@ -655,7 +657,7 @@ pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def, space(s.s); if newtype { word_space(s, ~"="); - match enum_definition.variants[0].node.kind { + match /*bad*/ copy enum_definition.variants[0].node.kind { ast::tuple_variant_kind(args) => print_type(s, args[0].ty), _ => fail!(~"newtype syntax with struct?") } @@ -693,9 +695,8 @@ pub fn visibility_to_str(vis: ast::visibility) -> ~str { pub fn visibility_qualified(vis: ast::visibility, s: ~str) -> ~str { match vis { - ast::private | ast::public => - visibility_to_str(vis) + " " + s, - ast::inherited => s + ast::private | ast::public => visibility_to_str(vis) + " " + s, + ast::inherited => copy s } } @@ -740,7 +741,7 @@ pub fn print_struct(s: @ps, maybe_print_comment(s, dtor.span.lo); print_outer_attributes(s, dtor.node.attrs); head(s, ~"drop"); - print_block(s, dtor.node.body); + print_block(s, &dtor.node.body); } for struct_def.fields.each |field| { @@ -776,14 +777,14 @@ pub fn print_tt(s: @ps, tt: ast::token_tree) { match tt { ast::tt_delim(ref tts) => print_tts(s, *tts), ast::tt_tok(_, ref tk) => { - word(s.s, parse::token::to_str(s.intr, (*tk))); + word(s.s, parse::token::to_str(s.intr, tk)); } ast::tt_seq(_, ref tts, ref sep, zerok) => { word(s.s, ~"$("); for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); } word(s.s, ~")"); match (*sep) { - Some(ref tk) => word(s.s, parse::token::to_str(s.intr, (*tk))), + Some(ref tk) => word(s.s, parse::token::to_str(s.intr, tk)), None => () } word(s.s, if zerok { ~"*" } else { ~"+" }); @@ -808,7 +809,7 @@ pub fn print_tts(s: @ps, &&tts: &[ast::token_tree]) { pub fn print_variant(s: @ps, v: ast::variant) { print_visibility(s, v.node.vis); - match v.node.kind { + match /*bad*/ copy v.node.kind { ast::tuple_variant_kind(args) => { print_ident(s, v.node.name); if !args.is_empty() { @@ -839,20 +840,20 @@ pub fn print_variant(s: @ps, v: ast::variant) { } } -pub fn print_ty_method(s: @ps, m: ast::ty_method) { +pub fn print_ty_method(s: @ps, m: &ast::ty_method) { hardbreak_if_not_bol(s); maybe_print_comment(s, m.span.lo); print_outer_attributes(s, m.attrs); print_ty_fn(s, None, None, None, m.purity, ast::Many, - m.decl, Some(m.ident), Some(&m.generics), - Some(m.self_ty.node)); + &m.decl, Some(m.ident), Some(&m.generics), + Some(/*bad*/ copy m.self_ty.node)); word(s.s, ~";"); } -pub fn print_trait_method(s: @ps, m: ast::trait_method) { - match m { - required(ref ty_m) => print_ty_method(s, (*ty_m)), - provided(m) => print_method(s, m) +pub fn print_trait_method(s: @ps, m: &ast::trait_method) { + match *m { + required(ref ty_m) => print_ty_method(s, ty_m), + 
provided(m) => print_method(s, m) } } @@ -860,11 +861,11 @@ pub fn print_method(s: @ps, meth: @ast::method) { hardbreak_if_not_bol(s); maybe_print_comment(s, meth.span.lo); print_outer_attributes(s, meth.attrs); - print_fn(s, meth.decl, Some(meth.purity), + print_fn(s, &meth.decl, Some(meth.purity), meth.ident, &meth.generics, Some(meth.self_ty.node), meth.vis); word(s.s, ~" "); - print_block_with_attrs(s, meth.body, meth.attrs); + print_block_with_attrs(s, &meth.body, meth.attrs); } pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) { @@ -904,7 +905,7 @@ pub fn print_attribute(s: @ps, attr: ast::attribute) { word(s.s, *comment); } else { word(s.s, ~"#["); - print_meta_item(s, @attr.node.value); + print_meta_item(s, attr.node.value); word(s.s, ~"]"); } } @@ -935,22 +936,22 @@ pub fn print_stmt(s: @ps, st: ast::stmt) { maybe_print_trailing_comment(s, st.span, None); } -pub fn print_block(s: @ps, blk: ast::blk) { +pub fn print_block(s: @ps, blk: &ast::blk) { print_possibly_embedded_block(s, blk, block_normal, indent_unit); } -pub fn print_block_unclosed(s: @ps, blk: ast::blk) { +pub fn print_block_unclosed(s: @ps, blk: &ast::blk) { print_possibly_embedded_block_(s, blk, block_normal, indent_unit, ~[], false); } -pub fn print_block_unclosed_indent(s: @ps, blk: ast::blk, indented: uint) { +pub fn print_block_unclosed_indent(s: @ps, blk: &ast::blk, indented: uint) { print_possibly_embedded_block_(s, blk, block_normal, indented, ~[], false); } pub fn print_block_with_attrs(s: @ps, - blk: ast::blk, + blk: &ast::blk, attrs: ~[ast::attribute]) { print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs, true); @@ -959,7 +960,7 @@ pub fn print_block_with_attrs(s: @ps, pub enum embed_type { block_block_fn, block_normal, } pub fn print_possibly_embedded_block(s: @ps, - blk: ast::blk, + blk: &ast::blk, embedded: embed_type, indented: uint) { print_possibly_embedded_block_( @@ -967,7 +968,7 @@ pub fn print_possibly_embedded_block(s: @ps, } pub fn print_possibly_embedded_block_(s: @ps, - blk: ast::blk, + blk: &ast::blk, embedded: embed_type, indented: uint, attrs: ~[ast::attribute], @@ -1002,7 +1003,7 @@ pub fn print_possibly_embedded_block_(s: @ps, (s.ann.post)(ann_node); } -pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk, +pub fn print_if(s: @ps, test: @ast::expr, blk: &ast::blk, elseopt: Option<@ast::expr>, chk: bool) { head(s, ~"if"); if chk { word_nbsp(s, ~"check"); } @@ -1020,7 +1021,7 @@ pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk, word(s.s, ~" else if "); print_expr(s, i); space(s.s); - print_block(s, (*t)); + print_block(s, t); do_else(s, e); } // "final else" @@ -1028,7 +1029,7 @@ pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk, cbox(s, indent_unit - 1u); ibox(s, 0u); word(s.s, ~" else "); - print_block(s, (*b)); + print_block(s, b); } // BLEAH, constraints would be great here _ => { @@ -1141,7 +1142,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { ibox(s, indent_unit); let ann_node = node_expr(s, expr); (s.ann.pre)(ann_node); - match expr.node { + match /*bad*/ copy expr.node { ast::expr_vstore(e, v) => match v { ast::expr_vstore_fixed(_) => { print_expr(s, e); @@ -1263,13 +1264,13 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { print_type_ex(s, ty, true); } ast::expr_if(test, ref blk, elseopt) => { - print_if(s, test, (*blk), elseopt, false); + print_if(s, test, blk, elseopt, false); } ast::expr_while(test, ref blk) => { head(s, ~"while"); print_expr(s, test); space(s.s); - print_block(s, (*blk)); + print_block(s, blk); } 
ast::expr_loop(ref blk, opt_ident) => { head(s, ~"loop"); @@ -1278,7 +1279,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { print_ident(s, *ident); word_space(s, ~":"); }); - print_block(s, (*blk)); + print_block(s, blk); } ast::expr_match(expr, ref arms) => { cbox(s, match_indent_unit); @@ -1323,7 +1324,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { ast::expr_block(ref blk) => { // the block will close the pattern's ibox print_block_unclosed_indent( - s, (*blk), match_indent_unit); + s, blk, match_indent_unit); } _ => { end(s); // close the ibox for the pattern @@ -1340,7 +1341,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { } } else { // the block will close the pattern's ibox - print_block_unclosed_indent(s, arm.body, match_indent_unit); + print_block_unclosed_indent(s, &arm.body, match_indent_unit); } } bclose_(s, expr.span, match_indent_unit); @@ -1352,9 +1353,9 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { ibox(s, 0u); print_fn_header_info(s, None, None, ast::Many, Some(sigil), ast::inherited); - print_fn_args_and_ret(s, /* FIXME (#2543) */ copy *decl, None); + print_fn_args_and_ret(s, decl, None); space(s.s); - print_block(s, (*body)); + print_block(s, body); } ast::expr_fn_block(ref decl, ref body) => { // in do/for blocks we don't want to show an empty @@ -1362,19 +1363,19 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { // we are inside. // // if !decl.inputs.is_empty() { - print_fn_block_args(s, /* FIXME (#2543) */ copy *decl); + print_fn_block_args(s, decl); space(s.s); // } - assert (*body).node.stmts.is_empty(); - assert (*body).node.expr.is_some(); + assert body.node.stmts.is_empty(); + assert body.node.expr.is_some(); // we extract the block, so as not to create another set of boxes - match (*body).node.expr.get().node { + match body.node.expr.get().node { ast::expr_block(ref blk) => { - print_block_unclosed(s, (*blk)); + print_block_unclosed(s, blk); } _ => { // this is a bare expression - print_expr(s, (*body).node.expr.get()); + print_expr(s, body.node.expr.get()); end(s); // need to close a box } } @@ -1394,7 +1395,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { cbox(s, indent_unit); // head-box, will be closed by print-block after { ibox(s, 0u); - print_block(s, (*blk)); + print_block(s, blk); } ast::expr_copy(e) => { word_space(s, ~"copy"); print_expr(s, e); } ast::expr_assign(lhs, rhs) => { @@ -1490,7 +1491,7 @@ pub fn print_local_decl(s: @ps, loc: @ast::local) { pub fn print_decl(s: @ps, decl: @ast::decl) { maybe_print_comment(s, decl.span.lo); - match decl.node { + match /*bad*/ copy decl.node { ast::decl_local(locs) => { space_if_not_bol(s); ibox(s, indent_unit); @@ -1574,7 +1575,7 @@ pub fn print_pat(s: @ps, &&pat: @ast::pat, refutable: bool) { (s.ann.pre)(ann_node); /* Pat isn't normalized, but the beauty of it is that it doesn't matter */ - match pat.node { + match /*bad*/ copy pat.node { ast::pat_wild => word(s.s, ~"_"), ast::pat_ident(binding_mode, path, sub) => { if refutable { @@ -1711,7 +1712,7 @@ pub fn print_self_ty(s: @ps, self_ty: ast::self_ty_) -> bool { } pub fn print_fn(s: @ps, - decl: ast::fn_decl, + decl: &ast::fn_decl, purity: Option, name: ast::ident, generics: &ast::Generics, @@ -1725,7 +1726,7 @@ pub fn print_fn(s: @ps, print_fn_args_and_ret(s, decl, opt_self_ty); } -pub fn print_fn_args(s: @ps, decl: ast::fn_decl, +pub fn print_fn_args(s: @ps, decl: &ast::fn_decl, opt_self_ty: Option) { // It is unfortunate to duplicate the commasep logic, but we we want the // self type and the args all in the same box. 
@@ -1743,7 +1744,7 @@ pub fn print_fn_args(s: @ps, decl: ast::fn_decl, end(s); } -pub fn print_fn_args_and_ret(s: @ps, decl: ast::fn_decl, +pub fn print_fn_args_and_ret(s: @ps, decl: &ast::fn_decl, opt_self_ty: Option) { popen(s); print_fn_args(s, decl, opt_self_ty); @@ -1760,7 +1761,7 @@ pub fn print_fn_args_and_ret(s: @ps, decl: ast::fn_decl, } } -pub fn print_fn_block_args(s: @ps, decl: ast::fn_decl) { +pub fn print_fn_block_args(s: @ps, decl: &ast::fn_decl) { word(s.s, ~"|"); print_fn_args(s, decl, None); word(s.s, ~"|"); @@ -1905,7 +1906,7 @@ pub fn print_view_item(s: @ps, item: @ast::view_item) { maybe_print_comment(s, item.span.lo); print_outer_attributes(s, item.attrs); print_visibility(s, item.vis); - match item.node { + match /*bad*/ copy item.node { ast::view_item_extern_mod(id, mta, _) => { head(s, ~"extern mod"); print_ident(s, id); @@ -1972,7 +1973,7 @@ pub fn print_ty_fn(s: @ps, opt_region: Option<@ast::region>, purity: ast::purity, onceness: ast::Onceness, - decl: ast::fn_decl, id: Option, + decl: &ast::fn_decl, id: Option, generics: Option<&ast::Generics>, opt_self_ty: Option) { ibox(s, indent_unit); @@ -1987,7 +1988,7 @@ pub fn print_ty_fn(s: @ps, print_onceness(s, onceness); word(s.s, ~"fn"); match id { Some(id) => { word(s.s, ~" "); print_ident(s, id); } _ => () } - match generics { Some(g) => print_generics(s, g), _ => () } + match /*bad*/ copy generics { Some(g) => print_generics(s, g), _ => () } zerobreak(s.s); popen(s); @@ -2114,7 +2115,7 @@ pub fn next_lit(s: @ps, pos: BytePos) -> Option { match s.literals { Some(ref lits) => { while s.cur_cmnt_and_lit.cur_lit < vec::len((*lits)) { - let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit]; + let ltrl = /*bad*/ copy (*lits)[s.cur_cmnt_and_lit.cur_lit]; if ltrl.pos > pos { return None; } s.cur_cmnt_and_lit.cur_lit += 1u; if ltrl.pos == pos { return Some(ltrl); } @@ -2201,7 +2202,7 @@ pub fn next_comment(s: @ps) -> Option { match s.comments { Some(ref cmnts) => { if s.cur_cmnt_and_lit.cur_cmnt < vec::len((*cmnts)) { - return Some((*cmnts)[s.cur_cmnt_and_lit.cur_cmnt]); + return Some(copy cmnts[s.cur_cmnt_and_lit.cur_cmnt]); } else { return None::; } } _ => return None:: @@ -2316,7 +2317,7 @@ pub mod test { let mock_interner = parse::token::mk_fake_ident_interner(); let abba_ident = mock_interner.intern(@~"abba"); - let decl: ast::fn_decl = ast::fn_decl { + let decl = ast::fn_decl { inputs: ~[], output: @ast::Ty {id: 0, node: ast::ty_nil, @@ -2324,7 +2325,7 @@ pub mod test { cf: ast::return_val }; let generics = ast_util::empty_generics(); - check_equal (&fun_to_str(decl, abba_ident, &generics, mock_interner), + check_equal (&fun_to_str(&decl, abba_ident, &generics, mock_interner), &~"fn abba()"); } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 70dec6f3343cd..5919271664e30 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -32,17 +32,20 @@ use core::vec; pub enum vt { mk_vt(visitor), } pub enum fn_kind { - fk_item_fn(ident, Generics, purity), // fn foo() - fk_method(ident, Generics, @method), // fn foo(&self) + fk_item_fn(ident, &Generics, purity), // fn foo() + fk_method(ident, &Generics, &method), // fn foo(&self) fk_anon(ast::Sigil), // fn@(x, y) { ... } fk_fn_block, // |x, y| ... 
- fk_dtor(Generics, ~[attribute], node_id /* self id */, - def_id /* parent class id */) // class destructor - + fk_dtor( // class destructor + &Generics, + &[attribute], + node_id /* self id */, + def_id /* parent class id */ + ) } -pub fn name_of_fn(fk: fn_kind) -> ident { - match fk { +pub fn name_of_fn(fk: &fn_kind) -> ident { + match *fk { fk_item_fn(name, _, _) | fk_method(name, _, _) => { /* FIXME (#2543) */ copy name } @@ -51,38 +54,40 @@ pub fn name_of_fn(fk: fn_kind) -> ident { } } -pub fn generics_of_fn(fk: fn_kind) -> Generics { - match fk { - fk_item_fn(_, tps, _) | - fk_method(_, tps, _) | - fk_dtor(tps, _, _, _) => { - copy tps +pub fn generics_of_fn(fk: &fn_kind) -> Generics { + match *fk { + fk_item_fn(_, generics, _) | + fk_method(_, generics, _) | + fk_dtor(generics, _, _, _) => { + copy *generics } fk_anon(*) | fk_fn_block(*) => { - Generics {lifetimes: opt_vec::Empty, - ty_params: opt_vec::Empty} + Generics { + lifetimes: opt_vec::Empty, + ty_params: opt_vec::Empty, + } } } } pub struct Visitor { - visit_mod: fn@(_mod, span, node_id, E, vt), + visit_mod: fn@(&_mod, span, node_id, E, vt), visit_view_item: fn@(@view_item, E, vt), visit_foreign_item: fn@(@foreign_item, E, vt), visit_item: fn@(@item, E, vt), visit_local: fn@(@local, E, vt), - visit_block: fn@(ast::blk, E, vt), + visit_block: fn@(&blk, E, vt), visit_stmt: fn@(@stmt, E, vt), - visit_arm: fn@(arm, E, vt), + visit_arm: fn@(&arm, E, vt), visit_pat: fn@(@pat, E, vt), visit_decl: fn@(@decl, E, vt), visit_expr: fn@(@expr, E, vt), visit_expr_post: fn@(@expr, E, vt), visit_ty: fn@(@Ty, E, vt), visit_generics: fn@(&Generics, E, vt), - visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt), - visit_ty_method: fn@(ty_method, E, vt), - visit_trait_method: fn@(trait_method, E, vt), + visit_fn: fn@(&fn_kind, &fn_decl, &blk, span, node_id, E, vt), + visit_ty_method: fn@(&ty_method, E, vt), + visit_trait_method: fn@(&trait_method, E, vt), visit_struct_def: fn@(@struct_def, ident, &Generics, node_id, E, vt), visit_struct_field: fn@(@struct_field, E, vt), @@ -118,10 +123,10 @@ pub fn default_visitor() -> visitor { } pub fn visit_crate(c: crate, e: E, v: vt) { - (v.visit_mod)(c.node.module, c.span, crate_node_id, e, v); + (v.visit_mod)(&c.node.module, c.span, crate_node_id, e, v); } -pub fn visit_mod(m: _mod, _sp: span, _id: node_id, e: E, v: vt) { +pub fn visit_mod(m: &_mod, _sp: span, _id: node_id, e: E, v: vt) { for m.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } for m.items.each |i| { (v.visit_item)(*i, e, v); } } @@ -139,65 +144,65 @@ pub fn visit_local(loc: @local, e: E, v: vt) { pub fn visit_item(i: @item, e: E, v: vt) { match i.node { - item_const(t, ex) => { - (v.visit_ty)(t, e, v); - (v.visit_expr)(ex, e, v); - } - item_fn(ref decl, purity, ref tp, ref body) => { - (v.visit_fn)( - fk_item_fn( - /* FIXME (#2543) */ copy i.ident, - /* FIXME (#2543) */ copy *tp, - purity - ), - /* FIXME (#2543) */ copy *decl, - (*body), - i.span, - i.id, - e, - v - ); - } - item_mod(m) => (v.visit_mod)(m, i.span, i.id, e, v), - item_foreign_mod(nm) => { - for nm.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } - for nm.items.each |ni| { (v.visit_foreign_item)(*ni, e, v); } - } - item_ty(t, ref tps) => { - (v.visit_ty)(t, e, v); - (v.visit_generics)(tps, e, v); - } - item_enum(ref enum_definition, ref tps) => { - (v.visit_generics)(tps, e, v); - visit_enum_def( - *enum_definition, - tps, - e, - v - ); - } - item_impl(ref tps, traits, ty, methods) => { - (v.visit_generics)(tps, e, v); - for traits.each |p| { - 
visit_path(p.path, e, v); + item_const(t, ex) => { + (v.visit_ty)(t, e, v); + (v.visit_expr)(ex, e, v); } - (v.visit_ty)(ty, e, v); - for methods.each |m| { - visit_method_helper(*m, e, v) + item_fn(ref decl, purity, ref generics, ref body) => { + (v.visit_fn)( + &fk_item_fn( + /* FIXME (#2543) */ copy i.ident, + generics, + purity + ), + decl, + body, + i.span, + i.id, + e, + v + ); } - } - item_struct(struct_def, ref tps) => { - (v.visit_generics)(tps, e, v); - (v.visit_struct_def)(struct_def, i.ident, tps, i.id, e, v); - } - item_trait(ref tps, ref traits, ref methods) => { - (v.visit_generics)(tps, e, v); - for traits.each |p| { visit_path(p.path, e, v); } - for (*methods).each |m| { - (v.visit_trait_method)(*m, e, v); + item_mod(ref m) => (v.visit_mod)(m, i.span, i.id, e, v), + item_foreign_mod(ref nm) => { + for nm.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } + for nm.items.each |ni| { (v.visit_foreign_item)(*ni, e, v); } } - } - item_mac(ref m) => visit_mac((*m), e, v) + item_ty(t, ref tps) => { + (v.visit_ty)(t, e, v); + (v.visit_generics)(tps, e, v); + } + item_enum(ref enum_definition, ref tps) => { + (v.visit_generics)(tps, e, v); + visit_enum_def( + *enum_definition, + tps, + e, + v + ); + } + item_impl(ref tps, ref traits, ty, ref methods) => { + (v.visit_generics)(tps, e, v); + for traits.each |p| { + visit_path(p.path, e, v); + } + (v.visit_ty)(ty, e, v); + for methods.each |m| { + visit_method_helper(*m, e, v) + } + } + item_struct(struct_def, ref generics) => { + (v.visit_generics)(generics, e, v); + (v.visit_struct_def)(struct_def, i.ident, generics, i.id, e, v); + } + item_trait(ref generics, ref traits, ref methods) => { + (v.visit_generics)(generics, e, v); + for traits.each |p| { visit_path(p.path, e, v); } + for methods.each |m| { + (v.visit_trait_method)(m, e, v); + } + } + item_mac(ref m) => visit_mac((*m), e, v) } } @@ -207,7 +212,7 @@ pub fn visit_enum_def(enum_definition: ast::enum_def, v: vt) { for enum_definition.variants.each |vr| { match vr.node.kind { - tuple_variant_kind(variant_args) => { + tuple_variant_kind(ref variant_args) => { for variant_args.each |va| { (v.visit_ty)(va.ty, e, v); } } struct_variant_kind(struct_def) => { @@ -227,30 +232,31 @@ pub fn skip_ty(_t: @Ty, _e: E, _v: vt) {} pub fn visit_ty(t: @Ty, e: E, v: vt) { match t.node { - ty_box(mt) | ty_uniq(mt) | - ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => { - (v.visit_ty)(mt.ty, e, v); - } - ty_rec(ref flds) => for (*flds).each |f| { - (v.visit_ty)(f.node.mt.ty, e, v); - }, - ty_tup(ts) => for ts.each |tt| { - (v.visit_ty)(*tt, e, v); - }, - ty_closure(f) => { - for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } - (v.visit_ty)(f.decl.output, e, v); - } - ty_bare_fn(f) => { - for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } - (v.visit_ty)(f.decl.output, e, v); - } - ty_path(p, _) => visit_path(p, e, v), - ty_fixed_length_vec(mt, _) => (v.visit_ty)(mt.ty, e, v), - ty_nil | - ty_bot | - ty_mac(_) | - ty_infer => () + ty_box(mt) | ty_uniq(mt) | + ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => { + (v.visit_ty)(mt.ty, e, v); + }, + ty_rec(ref flds) => { + for flds.each |f| { + (v.visit_ty)(f.node.mt.ty, e, v); + } + }, + ty_tup(ref ts) => { + for ts.each |tt| { + (v.visit_ty)(*tt, e, v); + } + }, + ty_closure(ref f) => { + for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } + (v.visit_ty)(f.decl.output, e, v); + }, + ty_bare_fn(ref f) => { + for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } + (v.visit_ty)(f.decl.output, e, v); + }, + ty_path(p, _) => visit_path(p, 
e, v), + ty_fixed_length_vec(ref mt, _) => (v.visit_ty)(mt.ty, e, v), + ty_nil | ty_bot | ty_mac(_) | ty_infer => () } } @@ -260,55 +266,61 @@ pub fn visit_path(p: @path, e: E, v: vt) { pub fn visit_pat(p: @pat, e: E, v: vt) { match p.node { - pat_enum(path, children) => { - visit_path(path, e, v); - do option::iter(&children) |children| { - for children.each |child| { (v.visit_pat)(*child, e, v); }} - } - pat_rec(fields, _) => for fields.each |f| { - (v.visit_pat)(f.pat, e, v) - }, - pat_struct(path, fields, _) => { - visit_path(path, e, v); - for fields.each |f| { - (v.visit_pat)(f.pat, e, v); + pat_enum(path, ref children) => { + visit_path(path, e, v); + do children.iter |children| { + for children.each |child| { (v.visit_pat)(*child, e, v); } + } } - } - pat_tup(elts) => for elts.each |elt| { - (v.visit_pat)(*elt, e, v) - }, - pat_box(inner) | pat_uniq(inner) | pat_region(inner) => - (v.visit_pat)(inner, e, v), - pat_ident(_, path, inner) => { - visit_path(path, e, v); - do option::iter(&inner) |subpat| { (v.visit_pat)(*subpat, e, v)}; - } - pat_lit(ex) => (v.visit_expr)(ex, e, v), - pat_range(e1, e2) => { - (v.visit_expr)(e1, e, v); - (v.visit_expr)(e2, e, v); - } - pat_wild => (), - pat_vec(elts, tail) => { - for elts.each |elt| { - (v.visit_pat)(*elt, e, v); + pat_rec(ref fields, _) => { + for fields.each |f| { + (v.visit_pat)(f.pat, e, v) + } + }, + pat_struct(path, ref fields, _) => { + visit_path(path, e, v); + for fields.each |f| { + (v.visit_pat)(f.pat, e, v); + } } - do option::iter(&tail) |tail| { - (v.visit_pat)(*tail, e, v); + pat_tup(ref elts) => { + for elts.each |elt| { + (v.visit_pat)(*elt, e, v) + } + }, + pat_box(inner) | pat_uniq(inner) | pat_region(inner) => { + (v.visit_pat)(inner, e, v) + }, + pat_ident(_, path, ref inner) => { + visit_path(path, e, v); + do inner.iter |subpat| { (v.visit_pat)(*subpat, e, v) } + } + pat_lit(ex) => (v.visit_expr)(ex, e, v), + pat_range(e1, e2) => { + (v.visit_expr)(e1, e, v); + (v.visit_expr)(e2, e, v); + } + pat_wild => (), + pat_vec(ref elts, ref tail) => { + for elts.each |elt| { + (v.visit_pat)(*elt, e, v); + } + do tail.iter |tail| { + (v.visit_pat)(*tail, e, v); + } } - } } } pub fn visit_foreign_item(ni: @foreign_item, e: E, v: vt) { match ni.node { - foreign_item_fn(fd, _, ref generics) => { - visit_fn_decl(fd, e, v); - (v.visit_generics)(generics, e, v); - } - foreign_item_const(t) => { - (v.visit_ty)(t, e, v); - } + foreign_item_fn(ref fd, _, ref generics) => { + visit_fn_decl(fd, e, v); + (v.visit_generics)(generics, e, v); + } + foreign_item_const(t) => { + (v.visit_ty)(t, e, v); + } } } @@ -328,7 +340,7 @@ pub fn visit_generics(generics: &Generics, e: E, v: vt) { } } -pub fn visit_fn_decl(fd: fn_decl, e: E, v: vt) { +pub fn visit_fn_decl(fd: &fn_decl, e: E, v: vt) { for fd.inputs.each |a| { (v.visit_pat)(a.pat, e, v); (v.visit_ty)(a.ty, e, v); @@ -340,24 +352,42 @@ pub fn visit_fn_decl(fd: fn_decl, e: E, v: vt) { // visit_fn() and check for fk_method(). I named this visit_method_helper() // because it is not a default impl of any method, though I doubt that really // clarifies anything. 
- Niko -pub fn visit_method_helper(m: @method, e: E, v: vt) { - (v.visit_fn)(fk_method(m.ident, /* FIXME (#2543) */ copy m.generics, m), - m.decl, m.body, m.span, m.id, e, v); +pub fn visit_method_helper(m: &method, e: E, v: vt) { + (v.visit_fn)( + &fk_method( + /* FIXME (#2543) */ copy m.ident, + &m.generics, + m + ), + &m.decl, + &m.body, + m.span, + m.id, + e, + v + ); } pub fn visit_struct_dtor_helper(dtor: struct_dtor, generics: &Generics, parent_id: def_id, e: E, v: vt) { - (v.visit_fn)(fk_dtor(copy *generics, dtor.node.attrs, - dtor.node.self_id, parent_id), - ast_util::dtor_dec(), - dtor.node.body, - dtor.span, - dtor.node.id, - e, v) - -} - -pub fn visit_fn(fk: fn_kind, decl: fn_decl, body: blk, _sp: span, + (v.visit_fn)( + &fk_dtor( + generics, + dtor.node.attrs, + dtor.node.self_id, + parent_id + ), + &ast_util::dtor_dec(), + &dtor.node.body, + dtor.span, + dtor.node.id, + e, + v + ) + +} + +pub fn visit_fn(fk: &fn_kind, decl: &fn_decl, body: &blk, _sp: span, _id: node_id, e: E, v: vt) { visit_fn_decl(decl, e, v); let generics = generics_of_fn(fk); @@ -365,29 +395,38 @@ pub fn visit_fn(fk: fn_kind, decl: fn_decl, body: blk, _sp: span, (v.visit_block)(body, e, v); } -pub fn visit_ty_method(m: ty_method, e: E, v: vt) { +pub fn visit_ty_method(m: &ty_method, e: E, v: vt) { for m.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } (v.visit_generics)(&m.generics, e, v); (v.visit_ty)(m.decl.output, e, v); } -pub fn visit_trait_method(m: trait_method, e: E, v: vt) { - match m { - required(ref ty_m) => (v.visit_ty_method)((*ty_m), e, v), +pub fn visit_trait_method(m: &trait_method, e: E, v: vt) { + match *m { + required(ref ty_m) => (v.visit_ty_method)(ty_m, e, v), provided(m) => visit_method_helper(m, e, v) } } -pub fn visit_struct_def(sd: @struct_def, - _nm: ast::ident, - generics: &Generics, - id: node_id, - e: E, v: vt) { +pub fn visit_struct_def( + sd: @struct_def, + _nm: ast::ident, + generics: &Generics, + id: node_id, + e: E, + v: vt +) { for sd.fields.each |f| { (v.visit_struct_field)(*f, e, v); } - do option::iter(&sd.dtor) |dtor| { - visit_struct_dtor_helper(*dtor, generics, ast_util::local_def(id), e, v) + do sd.dtor.iter |dtor| { + visit_struct_dtor_helper( + *dtor, + generics, + ast_util::local_def(id), + e, + v + ) }; } @@ -399,7 +438,7 @@ pub fn visit_struct_method(m: @method, e: E, v: vt) { visit_method_helper(m, e, v); } -pub fn visit_block(b: ast::blk, e: E, v: vt) { +pub fn visit_block(b: &blk, e: E, v: vt) { for b.node.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } @@ -420,10 +459,12 @@ pub fn visit_stmt(s: @stmt, e: E, v: vt) { pub fn visit_decl(d: @decl, e: E, v: vt) { match d.node { - decl_local(locs) => for locs.each |loc| { - (v.visit_local)(*loc, e, v) - }, - decl_item(it) => (v.visit_item)(it, e, v) + decl_local(ref locs) => { + for locs.each |loc| { + (v.visit_local)(*loc, e, v) + } + }, + decl_item(it) => (v.visit_item)(it, e, v) } } @@ -431,7 +472,7 @@ pub fn visit_expr_opt(eo: Option<@expr>, e: E, v: vt) { match eo { None => (), Some(ex) => (v.visit_expr)(ex, e, v) } } -pub fn visit_exprs(exprs: ~[@expr], e: E, v: vt) { +pub fn visit_exprs(exprs: &[@expr], e: E, v: vt) { for exprs.each |ex| { (v.visit_expr)(*ex, e, v); } } @@ -441,135 +482,144 @@ pub fn visit_mac(_m: mac, _e: E, _v: vt) { pub fn visit_expr(ex: @expr, e: E, v: vt) { match ex.node { - expr_vstore(x, _) => (v.visit_expr)(x, e, v), - expr_vec(es, _) => visit_exprs(es, e, v), - expr_repeat(element, count, _) => { - (v.visit_expr)(element, e, v); - (v.visit_expr)(count, e, v); - } - 
expr_rec(ref flds, base) => { - for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); } - visit_expr_opt(base, e, v); - } - expr_struct(p, ref flds, base) => { - visit_path(p, e, v); - for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); } - visit_expr_opt(base, e, v); - } - expr_tup(elts) => for elts.each |el| { (v.visit_expr)(*el, e, v); }, - expr_call(callee, args, _) => { - visit_exprs(args, e, v); - (v.visit_expr)(callee, e, v); - } - expr_method_call(callee, _, tys, args, _) => { - visit_exprs(args, e, v); - for tys.each |tp| { (v.visit_ty)(*tp, e, v); } - (v.visit_expr)(callee, e, v); - } - expr_binary(_, a, b) => { - (v.visit_expr)(a, e, v); (v.visit_expr)(b, e, v); - } - expr_addr_of(_, x) | expr_unary(_, x) | - expr_loop_body(x) | expr_do_body(x) | - expr_assert(x) => (v.visit_expr)(x, e, v), - expr_lit(_) => (), - expr_cast(x, t) => { (v.visit_expr)(x, e, v); (v.visit_ty)(t, e, v); } - expr_if(x, ref b, eo) => { - (v.visit_expr)(x, e, v); - (v.visit_block)((*b), e, v); - visit_expr_opt(eo, e, v); - } - expr_while(x, ref b) => { - (v.visit_expr)(x, e, v); - (v.visit_block)((*b), e, v); - } - expr_loop(ref b, _) => (v.visit_block)((*b), e, v), - expr_match(x, ref arms) => { - (v.visit_expr)(x, e, v); - for (*arms).each |a| { (v.visit_arm)(*a, e, v); } - } - expr_fn(proto, ref decl, ref body, _) => { - (v.visit_fn)( - fk_anon(proto), - /* FIXME (#2543) */ copy *decl, - *body, - ex.span, - ex.id, - e, - v - ); - } - expr_fn_block(ref decl, ref body) => { - (v.visit_fn)( - fk_fn_block, - /* FIXME (#2543) */ copy *decl, - *body, - ex.span, - ex.id, - e, - v - ); - } - expr_block(ref b) => (v.visit_block)((*b), e, v), - expr_assign(a, b) => { - (v.visit_expr)(b, e, v); - (v.visit_expr)(a, e, v); - } - expr_copy(a) => (v.visit_expr)(a, e, v), - expr_swap(a, b) => { (v.visit_expr)(a, e, v); (v.visit_expr)(b, e, v); } - expr_assign_op(_, a, b) => { - (v.visit_expr)(b, e, v); - (v.visit_expr)(a, e, v); - } - expr_field(x, _, tys) => { - (v.visit_expr)(x, e, v); - for tys.each |tp| { (v.visit_ty)(*tp, e, v); } - } - expr_index(a, b) => { - (v.visit_expr)(a, e, v); - (v.visit_expr)(b, e, v); - } - expr_path(p) => visit_path(p, e, v), - expr_break(_) => (), - expr_again(_) => (), - expr_ret(eo) => visit_expr_opt(eo, e, v), - expr_log(_, lv, x) => { - (v.visit_expr)(lv, e, v); - (v.visit_expr)(x, e, v); - } - expr_mac(ref mac) => visit_mac((*mac), e, v), - expr_paren(x) => (v.visit_expr)(x, e, v), + expr_vstore(x, _) => (v.visit_expr)(x, e, v), + expr_vec(ref es, _) => visit_exprs(*es, e, v), + expr_repeat(element, count, _) => { + (v.visit_expr)(element, e, v); + (v.visit_expr)(count, e, v); + } + expr_rec(ref flds, base) => { + for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); } + visit_expr_opt(base, e, v); + } + expr_struct(p, ref flds, base) => { + visit_path(p, e, v); + for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); } + visit_expr_opt(base, e, v); + } + expr_tup(ref elts) => { + for elts.each |el| { (v.visit_expr)(*el, e, v) } + } + expr_call(callee, ref args, _) => { + visit_exprs(*args, e, v); + (v.visit_expr)(callee, e, v); + } + expr_method_call(callee, _, ref tys, ref args, _) => { + visit_exprs(*args, e, v); + for tys.each |tp| { (v.visit_ty)(*tp, e, v); } + (v.visit_expr)(callee, e, v); + } + expr_binary(_, a, b) => { + (v.visit_expr)(a, e, v); + (v.visit_expr)(b, e, v); + } + expr_addr_of(_, x) | expr_unary(_, x) | + expr_loop_body(x) | expr_do_body(x) | + expr_assert(x) => (v.visit_expr)(x, e, v), + expr_lit(_) => (), + expr_cast(x, t) => { + 
(v.visit_expr)(x, e, v); + (v.visit_ty)(t, e, v); + } + expr_if(x, ref b, eo) => { + (v.visit_expr)(x, e, v); + (v.visit_block)(b, e, v); + visit_expr_opt(eo, e, v); + } + expr_while(x, ref b) => { + (v.visit_expr)(x, e, v); + (v.visit_block)(b, e, v); + } + expr_loop(ref b, _) => (v.visit_block)(b, e, v), + expr_match(x, ref arms) => { + (v.visit_expr)(x, e, v); + for arms.each |a| { (v.visit_arm)(a, e, v); } + } + expr_fn(proto, ref decl, ref body, _) => { + (v.visit_fn)( + &fk_anon(proto), + decl, + body, + ex.span, + ex.id, + e, + v + ); + } + expr_fn_block(ref decl, ref body) => { + (v.visit_fn)( + &fk_fn_block, + decl, + body, + ex.span, + ex.id, + e, + v + ); + } + expr_block(ref b) => (v.visit_block)(b, e, v), + expr_assign(a, b) => { + (v.visit_expr)(b, e, v); + (v.visit_expr)(a, e, v); + } + expr_copy(a) => (v.visit_expr)(a, e, v), + expr_swap(a, b) => { + (v.visit_expr)(a, e, v); + (v.visit_expr)(b, e, v); + } + expr_assign_op(_, a, b) => { + (v.visit_expr)(b, e, v); + (v.visit_expr)(a, e, v); + } + expr_field(x, _, ref tys) => { + (v.visit_expr)(x, e, v); + for tys.each |tp| { (v.visit_ty)(*tp, e, v); } + } + expr_index(a, b) => { + (v.visit_expr)(a, e, v); + (v.visit_expr)(b, e, v); + } + expr_path(p) => visit_path(p, e, v), + expr_break(_) => (), + expr_again(_) => (), + expr_ret(eo) => visit_expr_opt(eo, e, v), + expr_log(_, lv, x) => { + (v.visit_expr)(lv, e, v); + (v.visit_expr)(x, e, v); + } + expr_mac(ref mac) => visit_mac((*mac), e, v), + expr_paren(x) => (v.visit_expr)(x, e, v), } (v.visit_expr_post)(ex, e, v); } -pub fn visit_arm(a: arm, e: E, v: vt) { +pub fn visit_arm(a: &arm, e: E, v: vt) { for a.pats.each |p| { (v.visit_pat)(*p, e, v); } visit_expr_opt(a.guard, e, v); - (v.visit_block)(a.body, e, v); + (v.visit_block)(&a.body, e, v); } // Simpler, non-context passing interface. Always walks the whole tree, simply // calls the given functions on the nodes. 
pub struct SimpleVisitor { - visit_mod: fn@(_mod, span, node_id), + visit_mod: fn@(&_mod, span, node_id), visit_view_item: fn@(@view_item), visit_foreign_item: fn@(@foreign_item), visit_item: fn@(@item), visit_local: fn@(@local), - visit_block: fn@(ast::blk), + visit_block: fn@(&blk), visit_stmt: fn@(@stmt), - visit_arm: fn@(arm), + visit_arm: fn@(&arm), visit_pat: fn@(@pat), visit_decl: fn@(@decl), visit_expr: fn@(@expr), visit_expr_post: fn@(@expr), visit_ty: fn@(@Ty), visit_generics: fn@(&Generics), - visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id), - visit_ty_method: fn@(ty_method), - visit_trait_method: fn@(trait_method), + visit_fn: fn@(&fn_kind, &fn_decl, &blk, span, node_id), + visit_ty_method: fn@(&ty_method), + visit_trait_method: fn@(&trait_method), visit_struct_def: fn@(@struct_def, ident, &Generics, node_id), visit_struct_field: fn@(@struct_field), visit_struct_method: fn@(@method) @@ -580,34 +630,41 @@ pub type simple_visitor = @SimpleVisitor; pub fn simple_ignore_ty(_t: @Ty) {} pub fn default_simple_visitor() -> @SimpleVisitor { - return @SimpleVisitor {visit_mod: |_m: _mod, _sp: span, _id: node_id| { }, - visit_view_item: |_vi: @view_item| { }, - visit_foreign_item: |_ni: @foreign_item| { }, - visit_item: |_i: @item| { }, - visit_local: |_l: @local| { }, - visit_block: |_b: ast::blk| { }, - visit_stmt: |_s: @stmt| { }, - visit_arm: |_a: arm| { }, - visit_pat: |_p: @pat| { }, - visit_decl: |_d: @decl| { }, - visit_expr: |_e: @expr| { }, - visit_expr_post: |_e: @expr| { }, - visit_ty: simple_ignore_ty, - visit_generics: fn@(_ps: &Generics) {}, - visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span, - _id: node_id) { }, - visit_ty_method: fn@(_m: ty_method) { }, - visit_trait_method: fn@(_m: trait_method) { }, - visit_struct_def: fn@(_sd: @struct_def, _nm: ident, - _generics: &Generics, _id: node_id) { }, - visit_struct_field: fn@(_f: @struct_field) { }, - visit_struct_method: fn@(_m: @method) { } - }; + @SimpleVisitor { + visit_mod: |_m, _sp, _id| { }, + visit_view_item: |_vi| { }, + visit_foreign_item: |_ni| { }, + visit_item: |_i| { }, + visit_local: |_l| { }, + visit_block: |_b| { }, + visit_stmt: |_s| { }, + visit_arm: |_a| { }, + visit_pat: |_p| { }, + visit_decl: |_d| { }, + visit_expr: |_e| { }, + visit_expr_post: |_e| { }, + visit_ty: simple_ignore_ty, + visit_generics: fn@(_ps: &Generics) { }, + visit_fn: fn@(_fk: &fn_kind, _d: &fn_decl, _b: &blk, _sp: span, + _id: node_id) { }, + visit_ty_method: fn@(_m: &ty_method) { }, + visit_trait_method: fn@(_m: &trait_method) { }, + visit_struct_def: fn@(_sd: @struct_def, _nm: ident, + _generics: &Generics, _id: node_id) { }, + visit_struct_field: fn@(_f: @struct_field) { }, + visit_struct_method: fn@(_m: @method) { } + } } pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { - fn v_mod(f: fn@(_mod, span, node_id), m: _mod, sp: span, id: node_id, - &&e: (), v: vt<()>) { + fn v_mod( + f: fn@(&_mod, span, node_id), + m: &_mod, + sp: span, + id: node_id, + &&e: (), + v: vt<()> + ) { f(m, sp, id); visit_mod(m, sp, id, e, v); } @@ -628,7 +685,7 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(l); visit_local(l, e, v); } - fn v_block(f: fn@(ast::blk), bl: ast::blk, &&e: (), v: vt<()>) { + fn v_block(f: fn@(&blk), bl: &blk, &&e: (), v: vt<()>) { f(bl); visit_block(bl, e, v); } @@ -636,7 +693,7 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(st); visit_stmt(st, e, v); } - fn v_arm(f: fn@(arm), a: arm, &&e: (), v: vt<()>) { + fn v_arm(f: fn@(&arm), a: &arm, &&e: (), v: vt<()>) { f(a); visit_arm(a, 
e, v); } @@ -659,33 +716,46 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(ty); visit_ty(ty, e, v); } - fn v_ty_method(f: fn@(ty_method), ty: ty_method, &&e: (), v: vt<()>) { + fn v_ty_method(f: fn@(&ty_method), ty: &ty_method, &&e: (), v: vt<()>) { f(ty); visit_ty_method(ty, e, v); } - fn v_trait_method(f: fn@(trait_method), m: trait_method, &&e: (), + fn v_trait_method(f: fn@(&trait_method), m: &trait_method, &&e: (), v: vt<()>) { f(m); visit_trait_method(m, e, v); } - fn v_struct_def(f: fn@(@struct_def, ident, &Generics, node_id), - sd: @struct_def, - nm: ident, - generics: &Generics, - id: node_id, - &&e: (), v: vt<()>) { + fn v_struct_def( + f: fn@(@struct_def, ident, &Generics, node_id), + sd: @struct_def, + nm: ident, + generics: &Generics, + id: node_id, + &&e: (), + v: vt<()> + ) { f(sd, nm, generics, id); visit_struct_def(sd, nm, generics, id, e, v); } - fn v_generics(f: fn@(&Generics), - ps: &Generics, - &&e: (), v: vt<()>) { + fn v_generics( + f: fn@(&Generics), + ps: &Generics, + &&e: (), + v: vt<()> + ) { f(ps); visit_generics(ps, e, v); } - fn v_fn(f: fn@(fn_kind, fn_decl, blk, span, node_id), - fk: fn_kind, decl: fn_decl, body: blk, sp: span, - id: node_id, &&e: (), v: vt<()>) { + fn v_fn( + f: fn@(&fn_kind, &fn_decl, &blk, span, node_id), + fk: &fn_kind, + decl: &fn_decl, + body: &blk, + sp: span, + id: node_id, + &&e: (), + v: vt<()> + ) { f(fk, decl, body, sp, id); visit_fn(fk, decl, body, sp, id, e, v); }
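The change applied throughout the hunks above is uniform: visitor and pretty-printer entry points that used to take AST nodes by value (and needed /*bad*/ copy or /* FIXME (#2543) */ copy to satisfy the old by-value signatures) now take & references, and their bodies switch from match e { ... } to match *e { ... } with ref bindings so no node is moved or cloned. A minimal sketch of that shape, in present-day Rust and with a hypothetical mini-AST (Expr and count_lits are illustrative names, not the libsyntax types touched by this patch):

// Hypothetical illustration of the by-reference visiting pattern used above;
// not part of the patch and not the real libsyntax AST.
enum Expr {
    Lit(i64),
    Add(Box<Expr>, Box<Expr>),
    Block(Vec<Expr>),
}

// Taking &Expr and matching on *e borrows the children instead of moving
// them, which is what the patch does for visit_expr, print_expr, visit_fn,
// print_block and friends.
fn count_lits(e: &Expr) -> usize {
    match *e {
        Expr::Lit(_) => 1,
        Expr::Add(ref a, ref b) => count_lits(a) + count_lits(b),
        Expr::Block(ref es) => es.iter().map(count_lits).sum(),
    }
}

fn main() {
    let e = Expr::Add(
        Box::new(Expr::Lit(1)),
        Box::new(Expr::Block(vec![Expr::Lit(2), Expr::Lit(3)])),
    );
    assert_eq!(count_lits(&e), 3);
}

The same move shows up in the Visitor and SimpleVisitor tables: every callback type goes from fn@(blk, ...) to fn@(&blk, ...), so implementations only ever borrow the AST they are handed.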