From 5b9e110eab9c30428e99995a0adbec82857e3a1a Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Mon, 25 Feb 2013 06:19:44 -0800 Subject: [PATCH 01/30] libsyntax: Convert ast::attribute_ to store a @meta_item --- src/librustc/driver/session.rs | 2 +- src/librustc/front/core_inject.rs | 2 +- src/librustc/metadata/decoder.rs | 2 +- src/librustc/metadata/encoder.rs | 4 ++-- src/librustc/middle/lang_items.rs | 10 +++++----- src/libsyntax/ast.rs | 2 +- src/libsyntax/attr.rs | 15 ++++++++------- src/libsyntax/ext/auto_encode.rs | 4 ++-- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/ext/deriving.rs | 4 ++-- src/libsyntax/ext/pipes/ast_builder.rs | 2 +- src/libsyntax/fold.rs | 2 +- src/libsyntax/parse/attr.rs | 2 +- src/libsyntax/print/pprust.rs | 2 +- 14 files changed, 28 insertions(+), 27 deletions(-) diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index bd35bf50cefc0..94dbfb51eb822 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -358,7 +358,7 @@ pub mod test { pub fn make_crate_type_attr(+t: ~str) -> ast::attribute { codemap::respan(codemap::dummy_sp(), ast::attribute_ { style: ast::attr_outer, - value: codemap::respan(codemap::dummy_sp(), + value: @codemap::respan(codemap::dummy_sp(), ast::meta_name_value( @~"crate_type", codemap::respan(codemap::dummy_sp(), diff --git a/src/librustc/front/core_inject.rs b/src/librustc/front/core_inject.rs index 51b25854bc751..62a2dfd880b46 100644 --- a/src/librustc/front/core_inject.rs +++ b/src/librustc/front/core_inject.rs @@ -50,7 +50,7 @@ fn inject_libcore_ref(sess: Session, attrs: ~[ spanned(ast::attribute_ { style: ast::attr_inner, - value: spanned(ast::meta_name_value( + value: @spanned(ast::meta_name_value( @~"vers", spanned(ast::lit_str(@CORE_VERSION.to_str())) )), diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index ca55c8a407276..5d74d03e27b1a 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -994,7 +994,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::attribute] { codemap::spanned { node: ast::attribute_ { style: ast::attr_outer, - value: /*bad*/copy *meta_item, + value: meta_item, is_sugared_doc: false, }, span: codemap::dummy_sp() diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 86b07abffc2e9..c862a15dbc7a4 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -1020,7 +1020,7 @@ fn write_int(writer: io::Writer, &&n: int) { writer.write_be_u32(n as u32); } -fn encode_meta_item(ebml_w: writer::Encoder, mi: meta_item) { +fn encode_meta_item(ebml_w: writer::Encoder, mi: @meta_item) { match mi.node { meta_word(name) => { ebml_w.start_tag(tag_meta_item_word); @@ -1050,7 +1050,7 @@ fn encode_meta_item(ebml_w: writer::Encoder, mi: meta_item) { ebml_w.writer.write(str::to_bytes(*name)); ebml_w.end_tag(); for items.each |inner_item| { - encode_meta_item(ebml_w, **inner_item); + encode_meta_item(ebml_w, *inner_item); } ebml_w.end_tag(); } diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index b997c94a71b4d..586a4f29b7dad 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -316,7 +316,7 @@ struct LanguageItemCollector { impl LanguageItemCollector { fn match_and_collect_meta_item(&self, item_def_id: def_id, - meta_item: meta_item) { + meta_item: @meta_item) { match meta_item.node { meta_name_value(key, literal) => { match literal.node { @@ -368,10 +368,10 @@ impl 
LanguageItemCollector { visit_item: |item| { for item.attrs.each |attribute| { unsafe { - (*this).match_and_collect_meta_item(local_def(item - .id), - attribute.node - .value); + (*this).match_and_collect_meta_item( + local_def(item.id), + attribute.node.value + ); } } }, diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 6befb2f188042..4d071e4b26fbf 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -1142,7 +1142,7 @@ pub enum attr_style { attr_outer, attr_inner, } #[deriving_eq] pub struct attribute_ { style: attr_style, - value: meta_item, + value: @meta_item, is_sugared_doc: bool, } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 14ffb1cab5da4..3967ea3437b41 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -51,16 +51,17 @@ pub fn mk_word_item(name: @~str) -> @ast::meta_item { pub fn mk_attr(item: @ast::meta_item) -> ast::attribute { dummy_spanned(ast::attribute_ { style: ast::attr_inner, - value: *item, + value: item, is_sugared_doc: false }) } -pub fn mk_sugared_doc_attr(text: ~str, +pub fn mk_sugared_doc_attr(+text: ~str, +lo: BytePos, +hi: BytePos) -> ast::attribute { + let style = doc_comment_style(text); let lit = spanned(lo, hi, ast::lit_str(@text)); let attr = ast::attribute_ { - style: doc_comment_style(text), - value: spanned(lo, hi, ast::meta_name_value(@~"doc", lit)), + style: style, + value: @spanned(lo, hi, ast::meta_name_value(@~"doc", lit)), is_sugared_doc: true }; spanned(lo, hi, attr) @@ -69,7 +70,7 @@ pub fn mk_sugared_doc_attr(text: ~str, /* Conversion */ pub fn attr_meta(attr: ast::attribute) -> @ast::meta_item { - @attr.node.value + attr.node.value } // Get the meta_items from inside a vector of attributes @@ -79,7 +80,7 @@ pub fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] { pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute { if attr.node.is_sugared_doc { - let comment = get_meta_item_value_str(@attr.node.value).get(); + let comment = get_meta_item_value_str(attr.node.value).get(); let meta = mk_name_value_item_str(@~"doc", @strip_doc_comment_decoration(*comment)); mk_attr(meta) @@ -91,7 +92,7 @@ pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute { /* Accessors */ pub pure fn get_attr_name(attr: &ast::attribute) -> @~str { - get_meta_item_name(@attr.node.value) + get_meta_item_name(attr.node.value) } pub pure fn get_meta_item_name(meta: @ast::meta_item) -> @~str { diff --git a/src/libsyntax/ext/auto_encode.rs b/src/libsyntax/ext/auto_encode.rs index 7fbba987cc7df..c2d1d82833d26 100644 --- a/src/libsyntax/ext/auto_encode.rs +++ b/src/libsyntax/ext/auto_encode.rs @@ -110,7 +110,7 @@ mod syntax { pub fn expand_auto_encode( cx: ext_ctxt, span: span, - _mitem: ast::meta_item, + _mitem: @ast::meta_item, in_items: ~[@ast::item] ) -> ~[@ast::item] { fn is_auto_encode(a: &ast::attribute) -> bool { @@ -165,7 +165,7 @@ pub fn expand_auto_encode( pub fn expand_auto_decode( cx: ext_ctxt, span: span, - _mitem: ast::meta_item, + _mitem: @ast::meta_item, in_items: ~[@ast::item] ) -> ~[@ast::item] { fn is_auto_decode(a: &ast::attribute) -> bool { diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index f3a74302400c9..2f35e9ed97864 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -38,7 +38,7 @@ pub struct MacroDef { } pub type ItemDecorator = - fn@(ext_ctxt, span, ast::meta_item, ~[@ast::item]) -> ~[@ast::item]; + fn@(ext_ctxt, span, @ast::meta_item, ~[@ast::item]) -> ~[@ast::item]; pub struct SyntaxExpanderTT { expander: 
SyntaxExpanderTTFun, diff --git a/src/libsyntax/ext/deriving.rs b/src/libsyntax/ext/deriving.rs index 094eea81fd2fd..4942558f8bc34 100644 --- a/src/libsyntax/ext/deriving.rs +++ b/src/libsyntax/ext/deriving.rs @@ -58,7 +58,7 @@ type ExpandDerivingEnumDefFn = &fn(ext_ctxt, pub fn expand_deriving_eq(cx: ext_ctxt, span: span, - _mitem: meta_item, + _mitem: @meta_item, in_items: ~[@item]) -> ~[@item] { expand_deriving(cx, @@ -70,7 +70,7 @@ pub fn expand_deriving_eq(cx: ext_ctxt, pub fn expand_deriving_iter_bytes(cx: ext_ctxt, span: span, - _mitem: meta_item, + _mitem: @meta_item, in_items: ~[@item]) -> ~[@item] { expand_deriving(cx, diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index 49f7fe5853e7b..cc8435947097c 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -227,7 +227,7 @@ pub impl ext_ctxt_ast_builder for ext_ctxt { // Rust coding conventions let non_camel_case_attribute = respan(dummy_sp(), ast::attribute_ { style: ast::attr_outer, - value: respan(dummy_sp(), + value: @respan(dummy_sp(), ast::meta_list(@~"allow", ~[ @respan(dummy_sp(), ast::meta_word( diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index dacb6f60e3764..eaf29d40c3b78 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -98,7 +98,7 @@ fn fold_attribute_(at: attribute, fld: ast_fold) -> attribute { spanned { node: ast::attribute_ { style: at.node.style, - value: *fold_meta_item_(@at.node.value, fld), + value: fold_meta_item_(at.node.value, fld), is_sugared_doc: at.node.is_sugared_doc, }, span: fld.new_span(at.span), diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index c0c97a0b9eb59..87ecf6a95676b 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -72,7 +72,7 @@ impl parser_attr for Parser { self.expect(token::RBRACKET); let mut hi = self.span.hi; return spanned(lo, hi, ast::attribute_ { style: style, - value: *meta_item, + value: meta_item, is_sugared_doc: false }); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index d5a09e087a02e..6230b0465dcc2 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -904,7 +904,7 @@ pub fn print_attribute(s: @ps, attr: ast::attribute) { word(s.s, *comment); } else { word(s.s, ~"#["); - print_meta_item(s, @attr.node.value); + print_meta_item(s, attr.node.value); word(s.s, ~"]"); } } From 44f5537abf31ac2f931f343a22a85432feb619e1 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 17 Feb 2013 08:28:11 -0800 Subject: [PATCH 02/30] libsyntax: add explicit modes where required to copy strs/vecs --- src/libsyntax/attr.rs | 6 +-- src/libsyntax/ext/auto_encode.rs | 51 ++++++++++-------- src/libsyntax/ext/pipes/ast_builder.rs | 74 ++++++++++++++------------ src/libsyntax/ext/pipes/pipec.rs | 5 +- src/libsyntax/ext/pipes/proto.rs | 6 +-- src/libsyntax/ext/source_util.rs | 40 +++++++++----- 6 files changed, 106 insertions(+), 76 deletions(-) diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 3967ea3437b41..01b890fdeeb9b 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -209,7 +209,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { pub fn contains_name(metas: &[@ast::meta_item], name: &str) -> bool { let matches = find_meta_items_by_name(metas, name); - return vec::len(matches) > 0u; + matches.len() > 0u } pub fn attrs_contains_name(attrs: &[ast::attribute], name: &str) -> bool { @@ -227,14 +227,14 @@ pub fn 
first_attr_value_str_by_name(attrs: ~[ast::attribute], name: &str) } } -fn last_meta_item_by_name(items: ~[@ast::meta_item], name: &str) +fn last_meta_item_by_name(items: &[@ast::meta_item], name: &str) -> Option<@ast::meta_item> { let items = attr::find_meta_items_by_name(items, name); vec::last_opt(items) } -pub fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: &str) +pub fn last_meta_item_value_str_by_name(items: &[@ast::meta_item], name: &str) -> Option<@~str> { match last_meta_item_by_name(items, name) { diff --git a/src/libsyntax/ext/auto_encode.rs b/src/libsyntax/ext/auto_encode.rs index c2d1d82833d26..27e76a8ce7a3b 100644 --- a/src/libsyntax/ext/auto_encode.rs +++ b/src/libsyntax/ext/auto_encode.rs @@ -120,7 +120,7 @@ pub fn expand_auto_encode( fn filter_attrs(item: @ast::item) -> @ast::item { @ast::item { attrs: item.attrs.filtered(|a| !is_auto_encode(a)), - .. *item + .. copy *item } } @@ -175,7 +175,7 @@ pub fn expand_auto_decode( fn filter_attrs(item: @ast::item) -> @ast::item { @ast::item { attrs: item.attrs.filtered(|a| !is_auto_decode(a)), - .. *item + .. copy *item } } @@ -237,7 +237,7 @@ priv impl ext_ctxt { } } - fn expr(span: span, node: ast::expr_) -> @ast::expr { + fn expr(span: span, +node: ast::expr_) -> @ast::expr { @ast::expr { id: self.next_id(), callee_id: self.next_id(), @@ -246,7 +246,7 @@ priv impl ext_ctxt { } } - fn path(span: span, strs: ~[ast::ident]) -> @ast::path { + fn path(span: span, +strs: ~[ast::ident]) -> @ast::path { @ast::path { span: span, global: false, @@ -256,7 +256,7 @@ priv impl ext_ctxt { } } - fn path_global(span: span, strs: ~[ast::ident]) -> @ast::path { + fn path_global(span: span, +strs: ~[ast::ident]) -> @ast::path { @ast::path { span: span, global: true, @@ -266,8 +266,11 @@ priv impl ext_ctxt { } } - fn path_tps(span: span, strs: ~[ast::ident], - tps: ~[@ast::Ty]) -> @ast::path { + fn path_tps( + span: span, + +strs: ~[ast::ident], + +tps: ~[@ast::Ty] + ) -> @ast::path { @ast::path { span: span, global: false, @@ -277,8 +280,11 @@ priv impl ext_ctxt { } } - fn path_tps_global(span: span, strs: ~[ast::ident], - tps: ~[@ast::Ty]) -> @ast::path { + fn path_tps_global( + span: span, + +strs: ~[ast::ident], + +tps: ~[@ast::Ty] + ) -> @ast::path { @ast::path { span: span, global: true, @@ -288,8 +294,11 @@ priv impl ext_ctxt { } } - fn ty_path(span: span, strs: ~[ast::ident], - tps: ~[@ast::Ty]) -> @ast::Ty { + fn ty_path( + span: span, + +strs: ~[ast::ident], + +tps: ~[@ast::Ty] + ) -> @ast::Ty { @ast::Ty { id: self.next_id(), node: ast::ty_path( @@ -335,13 +344,13 @@ priv impl ext_ctxt { span: span})) } - fn lambda(blk: ast::blk) -> @ast::expr { + fn lambda(+blk: ast::blk) -> @ast::expr { let ext_cx = self; let blk_e = self.expr(blk.span, ast::expr_block(blk)); quote_expr!( || $blk_e ) } - fn blk(span: span, stmts: ~[@ast::stmt]) -> ast::blk { + fn blk(span: span, +stmts: ~[@ast::stmt]) -> ast::blk { codemap::spanned { node: ast::blk_ { view_items: ~[], @@ -367,15 +376,15 @@ priv impl ext_ctxt { } } - fn expr_path(span: span, strs: ~[ast::ident]) -> @ast::expr { + fn expr_path(span: span, +strs: ~[ast::ident]) -> @ast::expr { self.expr(span, ast::expr_path(self.path(span, strs))) } - fn expr_path_global(span: span, strs: ~[ast::ident]) -> @ast::expr { + fn expr_path_global(span: span, +strs: ~[ast::ident]) -> @ast::expr { self.expr(span, ast::expr_path(self.path_global(span, strs))) } - fn expr_var(span: span, var: ~str) -> @ast::expr { + fn expr_var(span: span, +var: ~str) -> @ast::expr { self.expr_path(span, 
~[self.ident_of(var)]) } @@ -390,7 +399,7 @@ priv impl ext_ctxt { fn expr_call( span: span, expr: @ast::expr, - args: ~[@ast::expr] + +args: ~[@ast::expr] ) -> @ast::expr { self.expr(span, ast::expr_call(expr, args, ast::NoSugar)) } @@ -399,7 +408,7 @@ priv impl ext_ctxt { self.lambda(self.expr_blk(expr)) } - fn lambda_stmts(span: span, stmts: ~[@ast::stmt]) -> @ast::expr { + fn lambda_stmts(span: span, +stmts: ~[@ast::stmt]) -> @ast::expr { self.lambda(self.blk(span, stmts)) } } @@ -545,7 +554,7 @@ fn mk_deser_impl( fn mk_ser_method( cx: ext_ctxt, span: span, - ser_body: ast::blk + +ser_body: ast::blk ) -> @ast::method { let ty_s = @ast::Ty { id: cx.next_id(), @@ -609,7 +618,7 @@ fn mk_deser_method( cx: ext_ctxt, span: span, ty: @ast::Ty, - deser_body: ast::blk + +deser_body: ast::blk ) -> @ast::method { let ty_d = @ast::Ty { id: cx.next_id(), @@ -947,7 +956,7 @@ fn mk_enum_ser_body( cx: ext_ctxt, span: span, name: ast::ident, - variants: ~[ast::variant] + +variants: ~[ast::variant] ) -> @ast::expr { let arms = do variants.mapi |v_idx, variant| { match variant.node.kind { diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index cc8435947097c..ed167a8e2a754 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -33,7 +33,7 @@ mod syntax { pub use parse; } -pub fn path(ids: ~[ident], span: span) -> @ast::path { +pub fn path(+ids: ~[ident], span: span) -> @ast::path { @ast::path { span: span, global: false, idents: ids, @@ -41,7 +41,7 @@ pub fn path(ids: ~[ident], span: span) -> @ast::path { types: ~[] } } -pub fn path_global(ids: ~[ident], span: span) -> @ast::path { +pub fn path_global(+ids: ~[ident], span: span) -> @ast::path { @ast::path { span: span, global: true, idents: ids, @@ -50,19 +50,23 @@ pub fn path_global(ids: ~[ident], span: span) -> @ast::path { } pub trait append_types { - fn add_ty(ty: @ast::Ty) -> @ast::path; - fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path; + fn add_ty(&self, ty: @ast::Ty) -> @ast::path; + fn add_tys(&self, +tys: ~[@ast::Ty]) -> @ast::path; } pub impl append_types for @ast::path { - fn add_ty(ty: @ast::Ty) -> @ast::path { - @ast::path { types: vec::append_one(self.types, ty), - .. *self} + fn add_ty(&self, ty: @ast::Ty) -> @ast::path { + @ast::path { + types: vec::append_one(copy self.types, ty), + .. **self + } } - fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path { - @ast::path { types: vec::append(self.types, tys), - .. *self} + fn add_tys(&self, +tys: ~[@ast::Ty]) -> @ast::path { + @ast::path { + types: vec::append(copy self.types, tys), + .. 
**self + } } } @@ -73,24 +77,28 @@ pub trait ext_ctxt_ast_builder { fn expr_block(&self, e: @ast::expr) -> ast::blk; fn fn_decl(&self, +inputs: ~[ast::arg], output: @ast::Ty) -> ast::fn_decl; fn item(&self, name: ident, span: span, +node: ast::item_) -> @ast::item; - fn item_fn_poly(&self, name: ident, - +inputs: ~[ast::arg], - output: @ast::Ty, - +ty_params: ~[ast::ty_param], - +body: ast::blk) -> @ast::item; - fn item_fn(&self, name: ident, - +inputs: ~[ast::arg], - output: @ast::Ty, - +body: ast::blk) -> @ast::item; - fn item_enum_poly(&self, name: ident, - span: span, - +enum_definition: ast::enum_def, - +ty_params: ~[ast::ty_param]) -> @ast::item; + fn item_fn_poly(&self, + name: ident, + +inputs: ~[ast::arg], + output: @ast::Ty, + +ty_params: ~[ast::ty_param], + +body: ast::blk) -> @ast::item; + fn item_fn(&self, + name: ident, + +inputs: ~[ast::arg], + output: @ast::Ty, + +body: ast::blk) -> @ast::item; + fn item_enum_poly(&self, + name: ident, + span: span, + +enum_definition: ast::enum_def, + +ty_params: ~[ast::ty_param]) -> @ast::item; fn item_enum(&self, name: ident, span: span, +enum_definition: ast::enum_def) -> @ast::item; - fn item_struct_poly(&self, name: ident, span: span, - struct_def: ast::struct_def, - ty_params: ~[ast::ty_param]) -> @ast::item; + fn item_struct_poly(&self, + name: ident, span: span, + struct_def: ast::struct_def, + +ty_params: ~[ast::ty_param]) -> @ast::item; fn item_struct(&self, name: ident, span: span, struct_def: ast::struct_def) -> @ast::item; fn struct_expr(&self, path: @ast::path, @@ -105,14 +113,14 @@ pub trait ext_ctxt_ast_builder { ty: @ast::Ty, +params: ~[ast::ty_param]) -> @ast::item; fn item_ty(&self, name: ident, span: span, ty: @ast::Ty) -> @ast::item; - fn ty_vars(&self, +ty_params: ~[ast::ty_param]) -> ~[@ast::Ty]; - fn ty_vars_global(&self, +ty_params: ~[ast::ty_param]) -> ~[@ast::Ty]; + fn ty_vars(&self, ty_params: &[ast::ty_param]) -> ~[@ast::Ty]; + fn ty_vars_global(&self, ty_params: &[ast::ty_param]) -> ~[@ast::Ty]; fn ty_field_imm(&self, name: ident, ty: @ast::Ty) -> ast::ty_field; fn field_imm(&self, name: ident, e: @ast::expr) -> ast::field; fn block(&self, +stmts: ~[@ast::stmt], e: @ast::expr) -> ast::blk; fn stmt_let(&self, ident: ident, e: @ast::expr) -> @ast::stmt; fn stmt_expr(&self, e: @ast::expr) -> @ast::stmt; - fn block_expr(&self, b: ast::blk) -> @ast::expr; + fn block_expr(&self, +b: ast::blk) -> @ast::expr; fn ty_option(&self, ty: @ast::Ty) -> @ast::Ty; fn ty_infer(&self) -> @ast::Ty; fn ty_nil_ast_builder(&self) -> @ast::Ty; @@ -128,7 +136,7 @@ pub impl ext_ctxt_ast_builder for ext_ctxt { ], dummy_sp()).add_ty(ty)) } - fn block_expr(&self, b: ast::blk) -> @ast::expr { + fn block_expr(&self, +b: ast::blk) -> @ast::expr { @expr { id: self.next_id(), callee_id: self.next_id(), @@ -282,7 +290,7 @@ pub impl ext_ctxt_ast_builder for ext_ctxt { fn item_struct_poly(&self, name: ident, span: span, struct_def: ast::struct_def, - ty_params: ~[ast::ty_param]) -> @ast::item { + +ty_params: ~[ast::ty_param]) -> @ast::item { self.item(name, span, ast::item_struct(@struct_def, ty_params)) } @@ -386,12 +394,12 @@ pub impl ext_ctxt_ast_builder for ext_ctxt { self.item_ty_poly(name, span, ty, ~[]) } - fn ty_vars(&self, +ty_params: ~[ast::ty_param]) -> ~[@ast::Ty] { + fn ty_vars(&self, +ty_params: &[ast::ty_param]) -> ~[@ast::Ty] { ty_params.map(|p| self.ty_path_ast_builder( path(~[p.ident], dummy_sp()))) } - fn ty_vars_global(&self, +ty_params: ~[ast::ty_param]) -> ~[@ast::Ty] { + fn ty_vars_global(&self, ty_params: &[ast::ty_param]) 
-> ~[@ast::Ty] { ty_params.map(|p| self.ty_path_ast_builder( path(~[p.ident], dummy_sp()))) } diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 444b09d9ae458..16de2a20668cf 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -58,8 +58,7 @@ pub impl gen_send for message { let next = this.proto.get_state(next_state.state); assert next_state.tys.len() == next.ty_params.len(); let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str())); - - let args_ast = (arg_names, *tys).map(|n, t| cx.arg(*n, *t)); + let args_ast = vec::map2(arg_names, *tys, |n, t| cx.arg(*n, *t)); let pipe_ty = cx.ty_path_ast_builder( path(~[this.data_name()], span) @@ -137,7 +136,7 @@ pub impl gen_send for message { debug!("pipec: no next state"); let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str())); - let args_ast = do (arg_names, *tys).map |n, t| { + let args_ast = do vec::map2(arg_names, *tys) |n, t| { cx.arg(cx.ident_of(*n), *t) }; diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index 7c6dc1f937dca..831b1b79ff86c 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -120,11 +120,11 @@ pub impl state_ { pub type protocol = @mut protocol_; -pub fn protocol(name: ~str, +span: span) -> protocol { +pub fn protocol(+name: ~str, +span: span) -> protocol { @mut protocol_(name, span) } -pub fn protocol_(name: ~str, span: span) -> protocol_ { +pub fn protocol_(+name: ~str, span: span) -> protocol_ { protocol_ { name: name, span: span, @@ -174,7 +174,7 @@ pub impl protocol_ { } pub impl protocol { - fn add_state_poly(&self, name: ~str, ident: ast::ident, dir: direction, + fn add_state_poly(&self, +name: ~str, ident: ast::ident, dir: direction, +ty_params: ~[ast::ty_param]) -> state { let messages = @mut ~[]; diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 26c38c945c7f1..f1b1602d0a645 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -23,19 +23,33 @@ use core::str; use core::vec; fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo { - let ExpandedFrom(CallInfo { call_site, _ }) = *expn_info; - match call_site.expn_info { - Some(next_expn_info) => { - let ExpandedFrom(CallInfo { - callee: NameAndSpan {name, _}, - _ - }) = *next_expn_info; - // Don't recurse into file using "include!" - if name == ~"include" { return expn_info; } - - topmost_expn_info(next_expn_info) - }, - None => expn_info + // FIXME(#3874): this would be better written as: + // let @ExpandedFrom(CallInfo { + // call_site: ref call_site, + // _ + // }) = expn_info; + match *expn_info { + ExpandedFrom(CallInfo { call_site: ref call_site, _}) => { + match call_site.expn_info { + Some(next_expn_info) => { + // Don't recurse into file using "include!" 
+ match *next_expn_info { + ExpandedFrom( + CallInfo { callee: NameAndSpan { + name: ref name, + _ + }, + _ + }) => { + if *name == ~"include" { return expn_info; } + } + } + + topmost_expn_info(next_expn_info) + }, + None => expn_info + } + } } } From 297c6e11bb362901b5a511057497a80eb255d2b4 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 17 Feb 2013 09:42:50 -0800 Subject: [PATCH 03/30] libsyntax: minor cleanup --- src/libsyntax/visit.rs | 47 +++++++++++++++++++++--------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 3701607ffc13f..e334df15073f6 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -566,29 +566,30 @@ pub type simple_visitor = @SimpleVisitor; pub fn simple_ignore_ty(_t: @Ty) {} pub fn default_simple_visitor() -> @SimpleVisitor { - return @SimpleVisitor {visit_mod: |_m: _mod, _sp: span, _id: node_id| { }, - visit_view_item: |_vi: @view_item| { }, - visit_foreign_item: |_ni: @foreign_item| { }, - visit_item: |_i: @item| { }, - visit_local: |_l: @local| { }, - visit_block: |_b: ast::blk| { }, - visit_stmt: |_s: @stmt| { }, - visit_arm: |_a: arm| { }, - visit_pat: |_p: @pat| { }, - visit_decl: |_d: @decl| { }, - visit_expr: |_e: @expr| { }, - visit_expr_post: |_e: @expr| { }, - visit_ty: simple_ignore_ty, - visit_ty_params: fn@(_ps: ~[ty_param]) {}, - visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span, - _id: node_id) { }, - visit_ty_method: fn@(_m: ty_method) { }, - visit_trait_method: fn@(_m: trait_method) { }, - visit_struct_def: fn@(_sd: @struct_def, _nm: ident, - _tps: ~[ty_param], _id: node_id) { }, - visit_struct_field: fn@(_f: @struct_field) { }, - visit_struct_method: fn@(_m: @method) { } - }; + @SimpleVisitor { + visit_mod: |_m, _sp, _id| { }, + visit_view_item: |_vi| { }, + visit_foreign_item: |_ni| { }, + visit_item: |_i| { }, + visit_local: |_l| { }, + visit_block: |_b| { }, + visit_stmt: |_s| { }, + visit_arm: |_a| { }, + visit_pat: |_p| { }, + visit_decl: |_d| { }, + visit_expr: |_e| { }, + visit_expr_post: |_e| { }, + visit_ty: simple_ignore_ty, + visit_ty_params: fn@(_ps: ~[ty_param]) { }, + visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span, + _id: node_id) { }, + visit_ty_method: fn@(_m: ty_method) { }, + visit_trait_method: fn@(_m: trait_method) { }, + visit_struct_def: fn@(_sd: @struct_def, _nm: ident, + _tps: ~[ty_param], _id: node_id) { }, + visit_struct_field: fn@(_f: @struct_field) { }, + visit_struct_method: fn@(_m: @method) { } + } } pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { From 9ac5262bdf27c964d1e9496479994aff2ae80acc Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Mon, 18 Feb 2013 21:25:44 -0800 Subject: [PATCH 04/30] libsyntax: convert visit to pass ty_params by reference --- src/librustc/middle/borrowck/check_loans.rs | 6 +- src/librustc/middle/borrowck/gather_loans.rs | 4 +- src/librustc/middle/check_match.rs | 2 +- src/librustc/middle/freevars.rs | 2 +- src/librustc/middle/kind.rs | 2 +- src/librustc/middle/lint.rs | 4 +- src/librustc/middle/liveness.rs | 8 +- src/librustc/middle/region.rs | 8 +- src/librustc/middle/resolve.rs | 6 +- src/librustc/middle/trans/base.rs | 8 +- src/librustc/middle/trans/reachable.rs | 17 +- src/librustc/middle/typeck/check/mod.rs | 2 +- src/librustc/middle/typeck/coherence.rs | 2 +- src/libsyntax/ast_map.rs | 162 ++++++----- src/libsyntax/ast_util.rs | 45 ++- src/libsyntax/attr.rs | 4 +- src/libsyntax/ext/base.rs | 6 +- src/libsyntax/ext/concat_idents.rs | 2 +- 
src/libsyntax/ext/env.rs | 2 +- src/libsyntax/ext/fmt.rs | 4 +- src/libsyntax/ext/log_syntax.rs | 6 +- src/libsyntax/ext/quote.rs | 22 +- src/libsyntax/ext/source_util.rs | 16 +- src/libsyntax/ext/trace_macros.rs | 14 +- src/libsyntax/ext/tt/macro_rules.rs | 13 +- src/libsyntax/visit.rs | 271 ++++++++++--------- 26 files changed, 354 insertions(+), 284 deletions(-) diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index afefec00c5040..0c043dac2167f 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -579,7 +579,7 @@ impl CheckLoanCtxt { } } -fn check_loans_in_fn(fk: visit::fn_kind, +fn check_loans_in_fn(fk: &visit::fn_kind, decl: ast::fn_decl, body: ast::blk, sp: span, @@ -590,7 +590,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, let fty = ty::node_id_to_type(self.tcx(), id); let declared_purity; - match fk { + match *fk { visit::fk_item_fn(*) | visit::fk_method(*) | visit::fk_dtor(*) => { declared_purity = ty::ty_fn_purity(fty); @@ -611,7 +611,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, do save_and_restore_managed(self.fn_args) { *self.declared_purity = declared_purity; - match fk { + match *fk { visit::fk_anon(*) | visit::fk_fn_block(*) if is_stack_closure => { // inherits the fn_args from enclosing ctxt diff --git a/src/librustc/middle/borrowck/gather_loans.rs b/src/librustc/middle/borrowck/gather_loans.rs index 546e9359a32d9..1b9c935cf7501 100644 --- a/src/librustc/middle/borrowck/gather_loans.rs +++ b/src/librustc/middle/borrowck/gather_loans.rs @@ -95,7 +95,7 @@ pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> ReqMaps { return glcx.req_maps; } -fn req_loans_in_fn(fk: visit::fn_kind, +fn req_loans_in_fn(fk: &visit::fn_kind, decl: ast::fn_decl, body: ast::blk, sp: span, @@ -107,7 +107,7 @@ fn req_loans_in_fn(fk: visit::fn_kind, let old_root_ub = self.root_ub; self.root_ub = body.node.id; - match fk { + match *fk { visit::fk_anon(*) | visit::fk_fn_block(*) => {} visit::fk_item_fn(*) | visit::fk_method(*) | visit::fk_dtor(*) => { diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 85ed4e74efb71..ce44ef9c07175 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -669,7 +669,7 @@ pub fn check_local(cx: @MatchCheckCtxt, } pub fn check_fn(cx: @MatchCheckCtxt, - kind: visit::fn_kind, + kind: &visit::fn_kind, decl: fn_decl, body: blk, sp: span, diff --git a/src/librustc/middle/freevars.rs b/src/librustc/middle/freevars.rs index 2c39e41c57bc4..81c4b1ae70c99 100644 --- a/src/librustc/middle/freevars.rs +++ b/src/librustc/middle/freevars.rs @@ -100,7 +100,7 @@ pub fn annotate_freevars(def_map: resolve::DefMap, crate: @ast::crate) -> freevar_map { let freevars = HashMap(); - let walk_fn = fn@(_fk: visit::fn_kind, _decl: ast::fn_decl, + let walk_fn = fn@(_fk: &visit::fn_kind, _decl: ast::fn_decl, blk: ast::blk, _sp: span, nid: ast::node_id) { let vars = collect_freevars(def_map, blk); freevars.insert(nid, vars); diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs index b08adef88c98c..2d87f0419799c 100644 --- a/src/librustc/middle/kind.rs +++ b/src/librustc/middle/kind.rs @@ -153,7 +153,7 @@ fn with_appropriate_checker(cx: Context, id: node_id, b: fn(check_fn)) { // Check that the free variables used in a shared/sendable closure conform // to the copy/move kind bounds. Then recursively check the function body. 
-fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span, +fn check_fn(fk: &visit::fn_kind, decl: fn_decl, body: blk, sp: span, fn_id: node_id, cx: Context, v: visit::vt) { // Check kinds on free variables: diff --git a/src/librustc/middle/lint.rs b/src/librustc/middle/lint.rs index 0a06808b63356..95be98be77680 100644 --- a/src/librustc/middle/lint.rs +++ b/src/librustc/middle/lint.rs @@ -923,13 +923,13 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { } } -fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl, +fn check_fn(tcx: ty::ctxt, fk: &visit::fn_kind, decl: ast::fn_decl, _body: ast::blk, span: span, id: ast::node_id) { debug!("lint check_fn fk=%? id=%?", fk, id); // don't complain about blocks, since they tend to get their modes // specified from the outside - match fk { + match *fk { visit::fk_fn_block(*) => { return; } _ => {} } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 75bf7cf26091c..739882258b2d7 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -435,7 +435,7 @@ impl IrMaps { } } -fn visit_fn(fk: visit::fn_kind, +fn visit_fn(fk: &visit::fn_kind, decl: fn_decl, body: blk, sp: span, @@ -465,7 +465,7 @@ fn visit_fn(fk: visit::fn_kind, }; // Add `self`, whether explicit or implicit. - match fk { + match *fk { fk_method(_, _, method) => { match method.self_ty.node { sty_by_ref => { @@ -1639,7 +1639,7 @@ fn check_expr(expr: @expr, &&self: @Liveness, vt: vt<@Liveness>) { } } -fn check_fn(_fk: visit::fn_kind, _decl: fn_decl, +fn check_fn(_fk: &visit::fn_kind, _decl: fn_decl, _body: blk, _sp: span, _id: node_id, &&_self: @Liveness, _v: vt<@Liveness>) { // do not check contents of nested fns @@ -1653,7 +1653,7 @@ enum ReadKind { } impl @Liveness { - fn check_ret(&self, id: node_id, sp: span, _fk: visit::fn_kind, + fn check_ret(&self, id: node_id, sp: span, _fk: &visit::fn_kind, entry_ln: LiveNode) { if self.live_on_entry(entry_ln, self.s.no_ret_var).is_some() { // if no_ret_var is live, then we fall off the end of the diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 5b2d3c9bb96cd..7782a58356b30 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -316,14 +316,14 @@ pub fn resolve_item(item: @ast::item, cx: ctxt, visitor: visit::vt) { visit::visit_item(item, new_cx, visitor); } -pub fn resolve_fn(fk: visit::fn_kind, +pub fn resolve_fn(fk: &visit::fn_kind, decl: ast::fn_decl, body: ast::blk, sp: span, id: ast::node_id, cx: ctxt, visitor: visit::vt) { - let fn_cx = match fk { + let fn_cx = match *fk { visit::fk_item_fn(*) | visit::fk_method(*) | visit::fk_dtor(*) => { // Top-level functions are a root scope. @@ -337,7 +337,7 @@ pub fn resolve_fn(fk: visit::fn_kind, }; // Record the ID of `self`. 
- match fk { + match *fk { visit::fk_method(_, _, method) => { cx.region_map.insert(method.self_id, body.node.id); } @@ -607,7 +607,7 @@ pub fn determine_rp_in_item(item: @ast::item, } } -pub fn determine_rp_in_fn(fk: visit::fn_kind, +pub fn determine_rp_in_fn(fk: &visit::fn_kind, decl: ast::fn_decl, body: ast::blk, _: span, diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index 689d6ca40eefe..afcc0298a95d6 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -1079,7 +1079,7 @@ pub impl Resolver { let privacy = visibility_to_privacy(item.vis); match /*bad*/copy item.node { - item_mod(module_) => { + item_mod(ref module_) => { let (name_bindings, new_parent) = self.add_child(ident, parent, ForbidDuplicateModules, sp); @@ -3725,7 +3725,7 @@ pub impl Resolver { visitor); } - item_mod(module_) => { + item_mod(ref module_) => { do self.with_scope(Some(item.ident)) { self.resolve_module(module_, item.span, item.ident, item.id, visitor); @@ -4098,7 +4098,7 @@ pub impl Resolver { } fn resolve_module(@mut self, - module_: _mod, + module_: &_mod, span: span, _name: ident, id: node_id, diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index f5fa83e3fff3f..9723e2cda26b8 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -3008,8 +3008,12 @@ pub fn trans_crate(sess: session::Session, let symbol_hasher = @hash::default_state(); let link_meta = link::build_link_meta(sess, crate, output, symbol_hasher); - let reachable = reachable::find_reachable(crate.node.module, emap2, tcx, - maps.method_map); + let reachable = reachable::find_reachable( + &crate.node.module, + emap2, + tcx, + maps.method_map + ); // Append ".rc" to crate name as LLVM module identifier. 
// diff --git a/src/librustc/middle/trans/reachable.rs b/src/librustc/middle/trans/reachable.rs index d56d28c982d7d..b917021add224 100644 --- a/src/librustc/middle/trans/reachable.rs +++ b/src/librustc/middle/trans/reachable.rs @@ -38,7 +38,7 @@ struct ctx { rmap: map } -pub fn find_reachable(crate_mod: _mod, exp_map2: resolve::ExportMap2, +pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2, tcx: ty::ctxt, method_map: typeck::method_map) -> map { let rmap = HashMap(); let cx = ctx { @@ -85,10 +85,10 @@ fn traverse_def_id(cx: ctx, did: def_id) { } } -fn traverse_public_mod(cx: ctx, mod_id: node_id, m: _mod) { +fn traverse_public_mod(cx: ctx, mod_id: node_id, m: &_mod) { if !traverse_exports(cx, mod_id) { // No exports, so every local item is exported - for vec::each(m.items) |item| { + for m.items.each |item| { traverse_public_item(cx, *item); } } @@ -98,10 +98,10 @@ fn traverse_public_item(cx: ctx, item: @item) { if cx.rmap.contains_key(&item.id) { return; } cx.rmap.insert(item.id, ()); match /*bad*/copy item.node { - item_mod(m) => traverse_public_mod(cx, item.id, m), + item_mod(ref m) => traverse_public_mod(cx, item.id, m), item_foreign_mod(nm) => { if !traverse_exports(cx, item.id) { - for vec::each(nm.items) |item| { + for nm.items.each |item| { cx.rmap.insert(item.id, ()); } } @@ -224,9 +224,12 @@ fn traverse_inline_body(cx: ctx, body: blk) { })); } -fn traverse_all_resources_and_impls(cx: ctx, crate_mod: _mod) { +fn traverse_all_resources_and_impls(cx: ctx, crate_mod: &_mod) { visit::visit_mod( - crate_mod, codemap::dummy_sp(), 0, cx, + crate_mod, + codemap::dummy_sp(), + 0, + cx, visit::mk_vt(@visit::Visitor { visit_expr: |_e, _cx, _v| { }, visit_item: |i, cx, v| { diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index e63e46ace3d05..9d14030d2617a 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -469,7 +469,7 @@ pub fn check_fn(ccx: @mut CrateCtxt, }; // Don't descend into fns and items - fn visit_fn(_fk: visit::fn_kind, _decl: ast::fn_decl, + fn visit_fn(_fk: &visit::fn_kind, _decl: ast::fn_decl, _body: ast::blk, _sp: span, _id: ast::node_id, &&_t: (), _v: visit::vt<()>) { } diff --git a/src/librustc/middle/typeck/coherence.rs b/src/librustc/middle/typeck/coherence.rs index 29738f2826661..96cee42263022 100644 --- a/src/librustc/middle/typeck/coherence.rs +++ b/src/librustc/middle/typeck/coherence.rs @@ -628,7 +628,7 @@ pub impl CoherenceChecker { visit_crate(*crate, (), mk_vt(@Visitor { visit_item: |item, _context, visitor| { match /*bad*/copy item.node { - item_mod(module_) => { + item_mod(ref module_) => { // Then visit the module items. visit_mod(module_, item.span, item.id, (), visitor); } diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 74f67808a5e97..8386f3c0498c3 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -118,7 +118,7 @@ pub struct Ctx { pub type vt = visit::vt<@mut Ctx>; pub fn extend(cx: @mut Ctx, +elt: ident) -> @path { - @(vec::append(cx.path, ~[path_name(elt)])) + @(vec::append(copy cx.path, ~[path_name(elt)])) } pub fn mk_ast_map_visitor() -> vt { @@ -160,7 +160,7 @@ pub fn map_decoded_item(diag: span_handler, // variables that are simultaneously in scope). 
let cx = @mut Ctx { map: @map, - path: path, + path: copy path, local_id: 0, diag: diag, }; @@ -173,10 +173,10 @@ pub fn map_decoded_item(diag: span_handler, ii_item(*) | ii_dtor(*) => { /* fallthrough */ } ii_foreign(i) => { cx.map.insert(i.id, node_foreign_item(i, foreign_abi_rust_intrinsic, - @path)); + @/*bad*/ copy path)); } ii_method(impl_did, m) => { - map_method(impl_did, @path, m, cx); + map_method(impl_did, @/*bad*/ copy path, m, cx); } } @@ -184,35 +184,45 @@ pub fn map_decoded_item(diag: span_handler, ii.accept(cx, v); } -pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, - sp: codemap::span, id: node_id, &&cx: @mut Ctx, v: vt) { +pub fn map_fn( + fk: &visit::fn_kind, + decl: fn_decl, + body: blk, + sp: codemap::span, + id: node_id, + &&cx: @mut Ctx, + v: visit::vt<@mut Ctx> +) { for decl.inputs.each |a| { cx.map.insert(a.id, - node_arg(/* FIXME (#2543) */ - copy *a, cx.local_id)); + node_arg(/* FIXME (#2543) */ copy *a, cx.local_id)); cx.local_id += 1u; } - match fk { - visit::fk_dtor(ref tps, ref attrs, self_id, parent_id) => { + match *fk { + visit::fk_dtor(tps, ref attrs, self_id, parent_id) => { let dt = @spanned { node: ast::struct_dtor_ { id: id, - attrs: (*attrs), + attrs: /* FIXME (#2543) */ vec::from_slice(*attrs), self_id: self_id, body: /* FIXME (#2543) */ copy body, }, span: sp, }; - cx.map.insert(id, node_dtor(/* FIXME (#2543) */ copy *tps, dt, - parent_id, - @/* FIXME (#2543) */ copy cx.path)); + cx.map.insert( + id, + node_dtor( + /* FIXME (#2543) */ vec::from_slice(tps), + dt, + parent_id, + @/* FIXME (#2543) */ copy cx.path)); } _ => () } visit::visit_fn(fk, decl, body, sp, id, cx, v); } -pub fn map_block(b: blk, &&cx: @mut Ctx, v: vt) { +pub fn map_block(b: blk, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy b)); visit::visit_block(b, cx, v); } @@ -229,12 +239,12 @@ pub fn number_pat(cx: @mut Ctx, pat: @pat) { }; } -pub fn map_local(loc: @local, &&cx: @mut Ctx, v: vt) { +pub fn map_local(loc: @local, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { number_pat(cx, loc.node.pat); visit::visit_local(loc, cx, v); } -pub fn map_arm(arm: arm, &&cx: @mut Ctx, v: vt) { +pub fn map_arm(arm: arm, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { number_pat(cx, arm.pats[0]); visit::visit_arm(arm, cx, v); } @@ -246,70 +256,82 @@ pub fn map_method(impl_did: def_id, impl_path: @path, cx.local_id += 1u; } -pub fn map_item(i: @item, &&cx: @mut Ctx, v: vt) { +pub fn map_item(i: @item, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { let item_path = @/* FIXME (#2543) */ copy cx.path; cx.map.insert(i.id, node_item(i, item_path)); match i.node { - item_impl(_, _, _, ms) => { - let impl_did = ast_util::local_def(i.id); - for ms.each |m| { - map_method(impl_did, extend(cx, i.ident), *m, cx); + item_impl(_, _, _, ref ms) => { + let impl_did = ast_util::local_def(i.id); + for ms.each |m| { + map_method(impl_did, extend(cx, i.ident), *m, cx); + } } - } - item_enum(ref enum_definition, _) => { - for (*enum_definition).variants.each |v| { - cx.map.insert(v.node.id, node_variant( - /* FIXME (#2543) */ copy *v, i, - extend(cx, i.ident))); + item_enum(ref enum_definition, _) => { + for (*enum_definition).variants.each |v| { + cx.map.insert(v.node.id, node_variant( + /* FIXME (#2543) */ copy *v, i, + extend(cx, i.ident))); + } } - } - item_foreign_mod(nm) => { - let abi = match attr::foreign_abi(i.attrs) { - either::Left(ref msg) => cx.diag.span_fatal(i.span, (*msg)), - either::Right(abi) => abi - }; - for nm.items.each |nitem| { - 
cx.map.insert(nitem.id, - node_foreign_item(*nitem, abi, - /* FIXME (#2543) */ - if nm.sort == ast::named { - extend(cx, i.ident) - } - else { - /* Anonymous extern mods go - in the parent scope */ - @copy cx.path - })); + item_foreign_mod(ref nm) => { + let abi = match attr::foreign_abi(i.attrs) { + Left(ref msg) => cx.diag.span_fatal(i.span, (*msg)), + Right(abi) => abi + }; + for nm.items.each |nitem| { + cx.map.insert(nitem.id, + node_foreign_item( + *nitem, + abi, + // FIXME (#2543) + if nm.sort == ast::named { + extend(cx, i.ident) + } else { + // Anonymous extern mods go in the parent scope + @copy cx.path + } + ) + ); + } } - } - item_struct(struct_def, _) => { - map_struct_def(struct_def, node_item(i, item_path), i.ident, cx, - v); - } - item_trait(_, ref traits, ref methods) => { - for traits.each |p| { - cx.map.insert(p.ref_id, node_item(i, item_path)); + item_struct(struct_def, _) => { + map_struct_def( + struct_def, + node_item(i, item_path), + i.ident, + cx, + v + ); } - for (*methods).each |tm| { - let id = ast_util::trait_method_to_ty_method(*tm).id; - let d_id = ast_util::local_def(i.id); - cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); + item_trait(_, ref traits, ref methods) => { + for traits.each |p| { + cx.map.insert(p.ref_id, node_item(i, item_path)); + } + for methods.each |tm| { + let id = ast_util::trait_method_to_ty_method(*tm).id; + let d_id = ast_util::local_def(i.id); + cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); + } } - } - _ => () + _ => () } match i.node { - item_mod(_) | item_foreign_mod(_) => { - cx.path.push(path_mod(i.ident)); - } - _ => cx.path.push(path_name(i.ident)) + item_mod(_) | item_foreign_mod(_) => { + cx.path.push(path_mod(i.ident)); + } + _ => cx.path.push(path_name(i.ident)) } visit::visit_item(i, cx, v); cx.path.pop(); } -pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node, - ident: ast::ident, cx: @mut Ctx, _v: vt) { +pub fn map_struct_def( + struct_def: @ast::struct_def, + parent_node: ast_node, + ident: ast::ident, + cx: @mut Ctx, + _v: visit::vt<@mut Ctx> +) { let p = extend(cx, ident); // If this is a tuple-like struct, register the constructor. 
match struct_def.ctor_id { @@ -326,12 +348,12 @@ pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node, } } -pub fn map_expr(ex: @expr, &&cx: @mut Ctx, v: vt) { +pub fn map_expr(ex: @expr, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { cx.map.insert(ex.id, node_expr(ex)); visit::visit_expr(ex, cx, v); } -pub fn map_stmt(stmt: @stmt, &&cx: @mut Ctx, v: vt) { +pub fn map_stmt(stmt: @stmt, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { cx.map.insert(stmt_id(*stmt), node_stmt(stmt)); visit::visit_stmt(stmt, cx, v); } @@ -400,8 +422,8 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { } pub fn node_item_query(items: map, id: node_id, - query: fn(@item) -> Result, - error_msg: ~str) -> Result { + query: fn(@item) -> Result, + +error_msg: ~str) -> Result { match items.find(&id) { Some(node_item(it, _)) => query(it), _ => fail!(error_msg) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index fec3a961a52a2..3830c81f08417 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -263,13 +263,13 @@ pub fn public_methods(ms: ~[@method]) -> ~[@method] { pub fn trait_method_to_ty_method(method: trait_method) -> ty_method { match method { required(ref m) => (*m), - provided(m) => { + provided(ref m) => { ty_method { ident: m.ident, - attrs: m.attrs, + attrs: copy m.attrs, purity: m.purity, - decl: m.decl, - tps: m.tps, + decl: copy m.decl, + tps: copy m.tps, self_ty: m.self_ty, id: m.id, span: m.span, @@ -396,8 +396,8 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_view_item: fn@(vi: @view_item) { match vi.node { view_item_extern_mod(_, _, id) => vfn(id), - view_item_use(vps) => { - for vec::each(vps) |vp| { + view_item_use(ref vps) => { + for vps.each |vp| { match vp.node { view_path_simple(_, _, _, id) => vfn(id), view_path_glob(_, id) => vfn(id), @@ -457,29 +457,29 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { } }, - visit_ty_params: fn@(ps: ~[ty_param]) { - for vec::each(ps) |p| { + visit_ty_params: fn@(ps: &[ty_param]) { + for ps.each |p| { vfn(p.id); } }, - visit_fn: fn@(fk: visit::fn_kind, d: ast::fn_decl, + visit_fn: fn@(fk: &visit::fn_kind, d: ast::fn_decl, _b: ast::blk, _sp: span, id: ast::node_id) { vfn(id); - match fk { - visit::fk_dtor(tps, _, self_id, parent_id) => { - for vec::each(tps) |tp| { vfn(tp.id); } + match *fk { + visit::fk_dtor(ref tps, _, self_id, parent_id) => { + for tps.each |tp| { vfn(tp.id); } vfn(id); vfn(self_id); vfn(parent_id.node); } - visit::fk_item_fn(_, tps, _) => { - for vec::each(tps) |tp| { vfn(tp.id); } + visit::fk_item_fn(_, ref tps, _) => { + for tps.each |tp| { vfn(tp.id); } } - visit::fk_method(_, tps, m) => { + visit::fk_method(_, ref tps, m) => { vfn(m.self_id); - for vec::each(tps) |tp| { vfn(tp.id); } + for tps.each |tp| { vfn(tp.id); } } visit::fk_anon(_) | visit::fk_fn_block => { @@ -497,7 +497,7 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_trait_method: fn@(_ty_m: trait_method) { }, - visit_struct_def: fn@(_sd: @struct_def, _id: ident, _tps: ~[ty_param], + visit_struct_def: fn@(_sd: @struct_def, _id: ident, _tps: &[ty_param], _id: node_id) { }, @@ -539,12 +539,12 @@ pub fn walk_pat(pat: @pat, it: fn(@pat)) { it(pat); match pat.node { pat_ident(_, _, Some(p)) => walk_pat(p, it), - pat_rec(fields, _) | pat_struct(_, fields, _) => { + pat_rec(ref fields, _) | pat_struct(_, ref fields, _) => { for fields.each |f| { walk_pat(f.pat, it) } } - pat_enum(_, Some(s)) | pat_tup(s) => { + pat_enum(_, Some(ref s)) | pat_tup(ref s) => { for 
s.each |p| { walk_pat(*p, it) } @@ -552,17 +552,16 @@ pub fn walk_pat(pat: @pat, it: fn(@pat)) { pat_box(s) | pat_uniq(s) | pat_region(s) => { walk_pat(s, it) } - pat_vec(elts, tail) => { + pat_vec(ref elts, ref tail) => { for elts.each |p| { walk_pat(*p, it) } - do option::iter(&tail) |tail| { + do tail.iter |tail| { walk_pat(*tail, it) } } pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, _, _) | - pat_enum(_, _) => { - } + pat_enum(_, _) => { } } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 01b890fdeeb9b..dad1c2a7feafe 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -123,8 +123,8 @@ pub fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<@~str> { pub fn get_meta_item_list(meta: @ast::meta_item) -> Option<~[@ast::meta_item]> { match meta.node { - ast::meta_list(_, l) => option::Some(/* FIXME (#2543) */ copy l), - _ => option::None + ast::meta_list(_, ref l) => Some(/* FIXME (#2543) */ copy *l), + _ => None } } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 2f35e9ed97864..8d99c43d43d20 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -46,7 +46,7 @@ pub struct SyntaxExpanderTT { } pub type SyntaxExpanderTTFun - = fn@(ext_ctxt, span, ~[ast::token_tree]) -> MacResult; + = fn@(ext_ctxt, span, &[ast::token_tree]) -> MacResult; pub struct SyntaxExpanderTTItem { expander: SyntaxExpanderTTItemFun, @@ -333,11 +333,11 @@ pub fn get_single_str_from_tts(cx: ext_ctxt, } } -pub fn get_exprs_from_tts(cx: ext_ctxt, tts: ~[ast::token_tree]) +pub fn get_exprs_from_tts(cx: ext_ctxt, tts: &[ast::token_tree]) -> ~[@ast::expr] { let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts); + vec::from_slice(tts)); let mut es = ~[]; while *p.token != token::EOF { if es.len() != 0 { diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index d83a9f39c5b0d..d4104c3a14af5 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -13,7 +13,7 @@ use core::prelude::*; use ext::base::*; use ext::base; -pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let mut res_str = ~""; for tts.eachi |i, e| { diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 4420c020a0b1d..f546c718393b6 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -22,7 +22,7 @@ use ext::build::mk_uniq_str; use core::option; use core::os; -pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let var = get_single_str_from_tts(cx, sp, tts, "env!"); diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index 937bcef5c253b..d15b228365ce5 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -25,9 +25,9 @@ use ext::base; use ext::build::*; use private::extfmt::ct::*; -pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { - let args = get_exprs_from_tts(cx, copy tts); + let args = get_exprs_from_tts(cx, tts); if args.len() == 0 { cx.span_fatal(sp, "fmt! 
takes at least 1 argument."); } diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index f713e5ce7d839..a59e43b1c1a15 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -19,12 +19,14 @@ use core::option; pub fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, - tt: ~[ast::token_tree]) + tt: &[ast::token_tree]) -> base::MacResult { cx.print_backtrace(); io::stdout().write_line( - print::pprust::tt_to_str(ast::tt_delim(tt),cx.parse_sess().interner)); + print::pprust::tt_to_str( + ast::tt_delim(vec::from_slice(tt)), + cx.parse_sess().interner)); //trivial expression MRExpr(@ast::expr { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index d529ee0c01b01..6d1ab58444693 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -219,19 +219,19 @@ pub mod rt { pub fn expand_quote_tokens(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { base::MRExpr(expand_tts(cx, sp, tts)) } pub fn expand_quote_expr(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { base::MRExpr(expand_parse_call(cx, sp, ~"parse_expr", ~[], tts)) } pub fn expand_quote_item(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]); base::MRExpr(expand_parse_call(cx, sp, ~"parse_item", ~[e_attrs], tts)) @@ -239,7 +239,7 @@ pub fn expand_quote_item(cx: ext_ctxt, pub fn expand_quote_pat(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_refutable = build::mk_lit(cx, sp, ast::lit_bool(true)); base::MRExpr(expand_parse_call(cx, sp, ~"parse_pat", ~[e_refutable], tts)) @@ -247,7 +247,7 @@ pub fn expand_quote_pat(cx: ext_ctxt, pub fn expand_quote_ty(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_param_colons = build::mk_lit(cx, sp, ast::lit_bool(false)); base::MRExpr(expand_parse_call(cx, sp, ~"parse_ty", ~[e_param_colons], tts)) @@ -255,7 +255,7 @@ pub fn expand_quote_ty(cx: ext_ctxt, pub fn expand_quote_stmt(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> base::MacResult { + tts: &[ast::token_tree]) -> base::MacResult { let e_attrs = build::mk_uniq_vec_e(cx, sp, ~[]); base::MRExpr(expand_parse_call(cx, sp, ~"parse_stmt", ~[e_attrs], tts)) @@ -506,7 +506,7 @@ fn mk_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) fn expand_tts(cx: ext_ctxt, sp: span, - tts: ~[ast::token_tree]) -> @ast::expr { + tts: &[ast::token_tree]) -> @ast::expr { // NB: It appears that the main parser loses its mind if we consider // $foo as a tt_nonterminal during the main parse, so we have to re-parse @@ -514,7 +514,11 @@ fn expand_tts(cx: ext_ctxt, // it has to do with transition away from supporting old-style macros, so // try removing it when enough of them are gone. 
- let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts); + let p = parse::new_parser_from_tts( + cx.parse_sess(), + cx.cfg(), + vec::from_slice(tts) + ); *p.quote_depth += 1u; let tts = p.parse_all_token_trees(); p.abort_if_errors(); @@ -578,7 +582,7 @@ fn expand_parse_call(cx: ext_ctxt, sp: span, parse_method: ~str, arg_exprs: ~[@ast::expr], - tts: ~[ast::token_tree]) -> @ast::expr { + tts: &[ast::token_tree]) -> @ast::expr { let tts_expr = expand_tts(cx, sp, tts); let cfg_call = || build::mk_call_( diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index f1b1602d0a645..c4049b796c550 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -54,7 +54,7 @@ fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo { } /* line!(): expands to the current line number */ -pub fn expand_line(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_line(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "line!"); @@ -65,7 +65,7 @@ pub fn expand_line(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) } /* col!(): expands to the current column number */ -pub fn expand_col(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_col(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "col!"); @@ -77,7 +77,7 @@ pub fn expand_col(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) /* file!(): expands to the current filename */ /* The filemap (`loc.file`) contains a bunch more information we could spit * out if we wanted. */ -pub fn expand_file(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_file(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "file!"); @@ -87,13 +87,13 @@ pub fn expand_file(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) base::MRExpr(mk_base_str(cx, topmost.call_site, filename)) } -pub fn expand_stringify(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_stringify(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let s = pprust::tts_to_str(tts, cx.parse_sess().interner); base::MRExpr(mk_base_str(cx, sp, s)) } -pub fn expand_mod(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_mod(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "module_path!"); base::MRExpr(mk_base_str(cx, sp, @@ -101,7 +101,7 @@ pub fn expand_mod(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) |x| cx.str_of(*x)), ~"::"))) } -pub fn expand_include(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_include(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let file = get_single_str_from_tts(cx, sp, tts, "include!"); let p = parse::new_sub_parser_from_file( @@ -110,7 +110,7 @@ pub fn expand_include(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) base::MRExpr(p.parse_expr()) } -pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let file = get_single_str_from_tts(cx, sp, tts, "include_str!"); let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file))); @@ -124,7 +124,7 @@ pub fn expand_include_str(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) base::MRExpr(mk_base_str(cx, sp, result::unwrap(res))) } -pub fn expand_include_bin(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) +pub fn 
expand_include_bin(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { let file = get_single_str_from_tts(cx, sp, tts, "include_bin!"); match io::read_whole_file(&res_rel_file(cx, sp, &Path(file))) { diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index f662f0337bfbc..842727f092a29 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use prelude::*; + use ast::tt_delim; use ast; use codemap::span; @@ -16,14 +18,16 @@ use ext::base; use parse::lexer::{new_tt_reader, reader}; use parse::parser::Parser; -use core::option::None; - pub fn expand_trace_macros(cx: ext_ctxt, sp: span, - tt: ~[ast::token_tree]) -> base::MacResult { + tt: &[ast::token_tree]) -> base::MacResult { let sess = cx.parse_sess(); let cfg = cx.cfg(); - let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic, - cx.parse_sess().interner, None, tt); + let tt_rdr = new_tt_reader( + copy cx.parse_sess().span_diagnostic, + cx.parse_sess().interner, + None, + vec::from_slice(tt) + ); let rdr = tt_rdr as reader; let rust_parser = Parser(sess, cfg, rdr.dup()); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index f42bb01f7c21d..cfb6e45cb5bb9 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -71,7 +71,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, // Given `lhses` and `rhses`, this is the new macro we create fn generic_extension(cx: ext_ctxt, sp: span, name: ident, - arg: ~[ast::token_tree], + arg: &[ast::token_tree], lhses: ~[@named_match], rhses: ~[@named_match]) -> MacResult { @@ -79,7 +79,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, io::println(fmt!("%s! { %s }", cx.str_of(name), print::pprust::tt_to_str( - ast::tt_delim(arg), + ast::tt_delim(vec::from_slice(arg)), cx.parse_sess().interner))); } @@ -94,7 +94,12 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, match *lhs { @matched_nonterminal(nt_matchers(ref mtcs)) => { // `none` is because we're not interpolating - let arg_rdr = new_tt_reader(s_d, itr, None, arg) as reader; + let arg_rdr = new_tt_reader( + s_d, + itr, + None, + vec::from_slice(arg) + ) as reader; match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) { success(named_matches) => { let rhs = match rhses[i] { @@ -136,7 +141,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, cx.span_fatal(best_fail_spot, best_fail_msg); } - let exp: @fn(ext_ctxt, span, ~[ast::token_tree]) -> MacResult = + let exp: @fn(ext_ctxt, span, &[ast::token_tree]) -> MacResult = |cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses); return MRDef(MacroDef{ diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index e334df15073f6..d79a2bbf17469 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -30,17 +30,21 @@ use core::vec; pub enum vt { mk_vt(visitor), } pub enum fn_kind { - fk_item_fn(ident, ~[ty_param], purity), // fn foo() - fk_method(ident, ~[ty_param], @method), // fn foo(&self) + fk_item_fn(ident, &[ty_param], purity), // fn foo() + fk_method(ident, &[ty_param], &method), // fn foo(&self) fk_anon(ast::Sigil), // fn@(x, y) { ... } fk_fn_block, // |x, y| ... 
- fk_dtor(~[ty_param], ~[attribute], node_id /* self id */, - def_id /* parent class id */) // class destructor + fk_dtor( // class destructor + &[ty_param], + &[attribute], + node_id /* self id */, + def_id /* parent class id */ + ) } -pub fn name_of_fn(fk: fn_kind) -> ident { - match fk { +pub fn name_of_fn(fk: &fn_kind) -> ident { + match *fk { fk_item_fn(name, _, _) | fk_method(name, _, _) => { /* FIXME (#2543) */ copy name } @@ -49,18 +53,16 @@ pub fn name_of_fn(fk: fn_kind) -> ident { } } -pub fn tps_of_fn(fk: fn_kind) -> ~[ty_param] { - match fk { +pub fn tps_of_fn(fk: &fn_kind) -> ~[ty_param] { + match *fk { fk_item_fn(_, tps, _) | fk_method(_, tps, _) | - fk_dtor(tps, _, _, _) => { - /* FIXME (#2543) */ copy tps - } + fk_dtor(tps, _, _, _) => /* FIXME (#2543) */ vec::from_slice(tps), fk_anon(*) | fk_fn_block(*) => ~[] } } pub struct Visitor { - visit_mod: fn@(_mod, span, node_id, E, vt), + visit_mod: fn@(&_mod, span, node_id, E, vt), visit_view_item: fn@(@view_item, E, vt), visit_foreign_item: fn@(@foreign_item, E, vt), visit_item: fn@(@item, E, vt), @@ -73,11 +75,11 @@ pub struct Visitor { visit_expr: fn@(@expr, E, vt), visit_expr_post: fn@(@expr, E, vt), visit_ty: fn@(@Ty, E, vt), - visit_ty_params: fn@(~[ty_param], E, vt), - visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt), + visit_ty_params: fn@(&[ty_param], E, vt), + visit_fn: fn@(&fn_kind, fn_decl, blk, span, node_id, E, vt), visit_ty_method: fn@(ty_method, E, vt), visit_trait_method: fn@(trait_method, E, vt), - visit_struct_def: fn@(@struct_def, ident, ~[ty_param], node_id, E, + visit_struct_def: fn@(@struct_def, ident, &[ty_param], node_id, E, vt), visit_struct_field: fn@(@struct_field, E, vt), visit_struct_method: fn@(@method, E, vt) @@ -112,10 +114,10 @@ pub fn default_visitor() -> visitor { } pub fn visit_crate(c: crate, e: E, v: vt) { - (v.visit_mod)(c.node.module, c.span, crate_node_id, e, v); + (v.visit_mod)(&c.node.module, c.span, crate_node_id, e, v); } -pub fn visit_mod(m: _mod, _sp: span, _id: node_id, e: E, v: vt) { +pub fn visit_mod(m: &_mod, _sp: span, _id: node_id, e: E, v: vt) { for m.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } for m.items.each |i| { (v.visit_item)(*i, e, v); } } @@ -133,65 +135,65 @@ pub fn visit_local(loc: @local, e: E, v: vt) { pub fn visit_item(i: @item, e: E, v: vt) { match i.node { - item_const(t, ex) => { - (v.visit_ty)(t, e, v); - (v.visit_expr)(ex, e, v); - } - item_fn(ref decl, purity, ref tp, ref body) => { - (v.visit_fn)( - fk_item_fn( - /* FIXME (#2543) */ copy i.ident, - /* FIXME (#2543) */ copy *tp, - purity - ), - /* FIXME (#2543) */ copy *decl, - (*body), - i.span, - i.id, - e, - v - ); - } - item_mod(m) => (v.visit_mod)(m, i.span, i.id, e, v), - item_foreign_mod(nm) => { - for nm.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } - for nm.items.each |ni| { (v.visit_foreign_item)(*ni, e, v); } - } - item_ty(t, tps) => { - (v.visit_ty)(t, e, v); - (v.visit_ty_params)(tps, e, v); - } - item_enum(ref enum_definition, ref tps) => { - (v.visit_ty_params)(/* FIXME (#2543) */ copy *tps, e, v); - visit_enum_def( - *enum_definition, - /* FIXME (#2543) */ copy *tps, - e, - v - ); - } - item_impl(tps, traits, ty, methods) => { - (v.visit_ty_params)(tps, e, v); - for traits.each |p| { - visit_path(p.path, e, v); + item_const(t, ex) => { + (v.visit_ty)(t, e, v); + (v.visit_expr)(ex, e, v); } - (v.visit_ty)(ty, e, v); - for methods.each |m| { - visit_method_helper(*m, e, v) + item_fn(ref decl, purity, ref tps, ref body) => { + (v.visit_fn)( + &fk_item_fn( + /* 
FIXME (#2543) */ copy i.ident, + *tps, + purity + ), + /* FIXME (#2543) */ copy *decl, + *body, + i.span, + i.id, + e, + v + ); } - } - item_struct(struct_def, tps) => { - (v.visit_ty_params)(tps, e, v); - (v.visit_struct_def)(struct_def, i.ident, tps, i.id, e, v); - } - item_trait(ref tps, ref traits, ref methods) => { - (v.visit_ty_params)(/* FIXME (#2543) */ copy *tps, e, v); - for traits.each |p| { visit_path(p.path, e, v); } - for (*methods).each |m| { - (v.visit_trait_method)(*m, e, v); + item_mod(ref m) => (v.visit_mod)(m, i.span, i.id, e, v), + item_foreign_mod(ref nm) => { + for nm.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } + for nm.items.each |ni| { (v.visit_foreign_item)(*ni, e, v); } } - } - item_mac(ref m) => visit_mac((*m), e, v) + item_ty(t, ref tps) => { + (v.visit_ty)(t, e, v); + (v.visit_ty_params)(*tps, e, v); + } + item_enum(ref enum_definition, ref tps) => { + (v.visit_ty_params)(*tps, e, v); + visit_enum_def( + *enum_definition, + /* FIXME (#2543) */ copy *tps, + e, + v + ); + } + item_impl(ref tps, ref traits, ty, ref methods) => { + (v.visit_ty_params)(*tps, e, v); + for traits.each |p| { + visit_path(p.path, e, v); + } + (v.visit_ty)(ty, e, v); + for methods.each |m| { + visit_method_helper(*m, e, v) + } + } + item_struct(struct_def, ref tps) => { + (v.visit_ty_params)(*tps, e, v); + (v.visit_struct_def)(struct_def, i.ident, *tps, i.id, e, v); + } + item_trait(ref tps, ref traits, ref methods) => { + (v.visit_ty_params)(*tps, e, v); + for traits.each |p| { visit_path(p.path, e, v); } + for (*methods).each |m| { + (v.visit_trait_method)(*m, e, v); + } + } + item_mac(ref m) => visit_mac((*m), e, v) } } @@ -296,13 +298,13 @@ pub fn visit_pat(p: @pat, e: E, v: vt) { pub fn visit_foreign_item(ni: @foreign_item, e: E, v: vt) { match ni.node { - foreign_item_fn(fd, _, tps) => { - (v.visit_ty_params)(tps, e, v); - visit_fn_decl(fd, e, v); - } - foreign_item_const(t) => { - (v.visit_ty)(t, e, v); - } + foreign_item_fn(fd, _, ref tps) => { + (v.visit_ty_params)(*tps, e, v); + visit_fn_decl(fd, e, v); + } + foreign_item_const(t) => { + (v.visit_ty)(t, e, v); + } } } @@ -315,7 +317,7 @@ pub fn visit_ty_param_bounds(bounds: @~[ty_param_bound], e: E, v: vt) { } } -pub fn visit_ty_params(tps: ~[ty_param], e: E, v: vt) { +pub fn visit_ty_params(tps: &[ty_param], e: E, v: vt) { for tps.each |tp| { visit_ty_param_bounds(tp.bounds, e, v); } @@ -333,21 +335,42 @@ pub fn visit_fn_decl(fd: fn_decl, e: E, v: vt) { // visit_fn() and check for fk_method(). I named this visit_method_helper() // because it is not a default impl of any method, though I doubt that really // clarifies anything. 
- Niko -pub fn visit_method_helper(m: @method, e: E, v: vt) { - (v.visit_fn)(fk_method(/* FIXME (#2543) */ copy m.ident, - /* FIXME (#2543) */ copy m.tps, m), - m.decl, m.body, m.span, m.id, e, v); -} - -pub fn visit_struct_dtor_helper(dtor: struct_dtor, tps: ~[ty_param], +pub fn visit_method_helper(m: &method, e: E, v: vt) { + (v.visit_fn)( + &fk_method( + /* FIXME (#2543) */ copy m.ident, + m.tps, + m + ), + m.decl, + m.body, + m.span, + m.id, + e, + v + ); +} + +pub fn visit_struct_dtor_helper(dtor: struct_dtor, tps: &[ty_param], parent_id: def_id, e: E, v: vt) { - (v.visit_fn)(fk_dtor(/* FIXME (#2543) */ copy tps, dtor.node.attrs, - dtor.node.self_id, parent_id), ast_util::dtor_dec(), - dtor.node.body, dtor.span, dtor.node.id, e, v) - -} - -pub fn visit_fn(fk: fn_kind, decl: fn_decl, body: blk, _sp: span, + (v.visit_fn)( + &fk_dtor( + tps, + dtor.node.attrs, + dtor.node.self_id, + parent_id + ), + ast_util::dtor_dec(), + dtor.node.body, + dtor.span, + dtor.node.id, + e, + v + ) + +} + +pub fn visit_fn(fk: &fn_kind, decl: fn_decl, body: blk, _sp: span, _id: node_id, e: E, v: vt) { visit_fn_decl(decl, e, v); (v.visit_ty_params)(tps_of_fn(fk), e, v); @@ -367,13 +390,13 @@ pub fn visit_trait_method(m: trait_method, e: E, v: vt) { } } -pub fn visit_struct_def(sd: @struct_def, _nm: ast::ident, tps: ~[ty_param], +pub fn visit_struct_def(sd: @struct_def, _nm: ast::ident, tps: &[ty_param], id: node_id, e: E, v: vt) { for sd.fields.each |f| { (v.visit_struct_field)(*f, e, v); } - do option::iter(&sd.dtor) |dtor| { - visit_struct_dtor_helper(*dtor, tps, ast_util::local_def(id), e, v) + do sd.dtor.iter |dtor| { + visit_struct_dtor_helper(*dtor, tps, ast_util::local_def(id), e, v) }; } @@ -476,7 +499,7 @@ pub fn visit_expr(ex: @expr, e: E, v: vt) { } expr_fn(proto, ref decl, ref body, _) => { (v.visit_fn)( - fk_anon(proto), + &fk_anon(proto), /* FIXME (#2543) */ copy *decl, *body, ex.span, @@ -487,7 +510,7 @@ pub fn visit_expr(ex: @expr, e: E, v: vt) { } expr_fn_block(ref decl, ref body) => { (v.visit_fn)( - fk_fn_block, + &fk_fn_block, /* FIXME (#2543) */ copy *decl, *body, ex.span, @@ -539,7 +562,7 @@ pub fn visit_arm(a: arm, e: E, v: vt) { // calls the given functions on the nodes. 
pub struct SimpleVisitor { - visit_mod: fn@(_mod, span, node_id), + visit_mod: fn@(&_mod, span, node_id), visit_view_item: fn@(@view_item), visit_foreign_item: fn@(@foreign_item), visit_item: fn@(@item), @@ -552,11 +575,11 @@ pub struct SimpleVisitor { visit_expr: fn@(@expr), visit_expr_post: fn@(@expr), visit_ty: fn@(@Ty), - visit_ty_params: fn@(~[ty_param]), - visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id), + visit_ty_params: fn@(&[ty_param]), + visit_fn: fn@(&fn_kind, fn_decl, blk, span, node_id), visit_ty_method: fn@(ty_method), visit_trait_method: fn@(trait_method), - visit_struct_def: fn@(@struct_def, ident, ~[ty_param], node_id), + visit_struct_def: fn@(@struct_def, ident, &[ty_param], node_id), visit_struct_field: fn@(@struct_field), visit_struct_method: fn@(@method) } @@ -580,21 +603,27 @@ pub fn default_simple_visitor() -> @SimpleVisitor { visit_expr: |_e| { }, visit_expr_post: |_e| { }, visit_ty: simple_ignore_ty, - visit_ty_params: fn@(_ps: ~[ty_param]) { }, - visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span, + visit_ty_params: fn@(_ps: &[ty_param]) { }, + visit_fn: fn@(_fk: &fn_kind, _d: fn_decl, _b: blk, _sp: span, _id: node_id) { }, visit_ty_method: fn@(_m: ty_method) { }, visit_trait_method: fn@(_m: trait_method) { }, visit_struct_def: fn@(_sd: @struct_def, _nm: ident, - _tps: ~[ty_param], _id: node_id) { }, + _tps: &[ty_param], _id: node_id) { }, visit_struct_field: fn@(_f: @struct_field) { }, visit_struct_method: fn@(_m: @method) { } } } pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { - fn v_mod(f: fn@(_mod, span, node_id), m: _mod, sp: span, id: node_id, - &&e: (), v: vt<()>) { + fn v_mod( + f: fn@(&_mod, span, node_id), + m: &_mod, + sp: span, + id: node_id, + &&e: (), + v: vt<()> + ) { f(m, sp, id); visit_mod(m, sp, id, e, v); } @@ -655,20 +684,21 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(m); visit_trait_method(m, e, v); } - fn v_struct_def(f: fn@(@struct_def, ident, ~[ty_param], node_id), - sd: @struct_def, nm: ident, tps: ~[ty_param], id: node_id, + fn v_struct_def(f: fn@(@struct_def, ident, &[ty_param], node_id), + sd: @struct_def, nm: ident, tps: &[ty_param], id: node_id, &&e: (), v: vt<()>) { f(sd, nm, tps, id); visit_struct_def(sd, nm, tps, id, e, v); } - fn v_ty_params(f: fn@(~[ty_param]), - ps: ~[ty_param], - &&e: (), v: vt<()>) { + fn v_ty_params(f: fn@(&[ty_param]), + ps: &[ty_param], + &&e: (), + v: vt<()>) { f(ps); visit_ty_params(ps, e, v); } - fn v_fn(f: fn@(fn_kind, fn_decl, blk, span, node_id), - fk: fn_kind, decl: fn_decl, body: blk, sp: span, + fn v_fn(f: fn@(&fn_kind, fn_decl, blk, span, node_id), + fk: &fn_kind, decl: fn_decl, body: blk, sp: span, id: node_id, &&e: (), v: vt<()>) { f(fk, decl, body, sp, id); visit_fn(fk, decl, body, sp, id, e, v); @@ -700,12 +730,9 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { visit_expr_post: |a,b,c| v_expr_post(v.visit_expr_post, a, b, c), visit_ty: visit_ty, - visit_ty_params: |a,b,c| - v_ty_params(v.visit_ty_params, a, b, c), - visit_fn: |a,b,c,d,e,f,g| - v_fn(v.visit_fn, a, b, c, d, e, f, g), - visit_ty_method: |a,b,c| - v_ty_method(v.visit_ty_method, a, b, c), + visit_ty_params: |a,b,c| v_ty_params(v.visit_ty_params, a, b, c), + visit_fn: |a,b,c,d,e,f,g| v_fn(v.visit_fn, a, b, c, d, e, f, g), + visit_ty_method: |a,b,c| v_ty_method(v.visit_ty_method, a, b, c), visit_trait_method: |a,b,c| v_trait_method(v.visit_trait_method, a, b, c), visit_struct_def: |a,b,c,d,e,f| From f14409c528125836a5cbe9d4d7a39b8d6695f7b1 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar 
Date: Sun, 17 Feb 2013 22:20:36 -0800 Subject: [PATCH 05/30] libsyntax: progress on making syntax::visit vecs_implicitly_copyable-free --- src/libfuzzer/fuzzer.rc | 18 +- src/librustc/driver/driver.rs | 2 +- src/librustc/front/config.rs | 20 +- src/librustc/front/core_inject.rs | 16 +- src/librustc/front/test.rs | 18 +- src/librustc/metadata/encoder.rs | 10 +- src/librustc/middle/astencode.rs | 8 +- src/librustc/middle/borrowck/check_loans.rs | 6 +- src/librustc/middle/borrowck/gather_loans.rs | 10 +- src/librustc/middle/check_loop.rs | 10 +- src/librustc/middle/check_match.rs | 4 +- src/librustc/middle/freevars.rs | 6 +- src/librustc/middle/kind.rs | 12 +- src/librustc/middle/lint.rs | 14 +- src/librustc/middle/liveness.rs | 36 +- src/librustc/middle/region.rs | 14 +- src/librustc/middle/resolve.rs | 26 +- src/librustc/middle/trans/base.rs | 17 +- src/librustc/middle/trans/callee.rs | 4 +- src/librustc/middle/trans/foreign.rs | 2 +- src/librustc/middle/trans/meth.rs | 2 +- src/librustc/middle/trans/reachable.rs | 10 +- src/librustc/middle/trans/type_use.rs | 8 +- src/librustc/middle/ty.rs | 2 +- src/librustc/middle/typeck/astconv.rs | 12 +- src/librustc/middle/typeck/check/_match.rs | 2 +- src/librustc/middle/typeck/check/mod.rs | 66 +- src/librustc/middle/typeck/check/regionck.rs | 4 +- src/librustc/middle/typeck/check/vtable.rs | 2 +- src/librustc/middle/typeck/check/writeback.rs | 4 +- src/librustc/middle/typeck/collect.rs | 47 +- src/librustc/util/common.rs | 4 +- src/librustc/util/ppaux.rs | 6 +- src/librustpkg/util.rs | 10 +- src/libsyntax/ast_map.rs | 20 +- src/libsyntax/ast_util.rs | 20 +- src/libsyntax/attr.rs | 38 +- src/libsyntax/ext/expand.rs | 131 +-- src/libsyntax/fold.rs | 849 ++++++++++-------- src/libsyntax/print/pprust.rs | 68 +- src/libsyntax/visit.rs | 425 ++++----- 41 files changed, 1061 insertions(+), 922 deletions(-) diff --git a/src/libfuzzer/fuzzer.rc b/src/libfuzzer/fuzzer.rc index a9ddfe8140455..1887b83ebebda 100644 --- a/src/libfuzzer/fuzzer.rc +++ b/src/libfuzzer/fuzzer.rc @@ -134,7 +134,7 @@ pub pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool { pub fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool { // Restrictions happen to be the same. - safe_to_replace_ty(t.node, tm) + safe_to_replace_ty(&t.node, tm) } // Not type-parameterized: https://github.com/mozilla/rust/issues/898 (FIXED) @@ -175,8 +175,8 @@ pub fn steal(crate: ast::crate, tm: test_mode) -> StolenStuff { } -pub fn safe_to_replace_expr(e: ast::expr_, _tm: test_mode) -> bool { - match e { +pub fn safe_to_replace_expr(e: &ast::expr_, _tm: test_mode) -> bool { + match *e { // https://github.com/mozilla/rust/issues/652 ast::expr_if(*) => { false } ast::expr_block(_) => { false } @@ -188,8 +188,8 @@ pub fn safe_to_replace_expr(e: ast::expr_, _tm: test_mode) -> bool { } } -pub fn safe_to_replace_ty(t: ast::ty_, _tm: test_mode) -> bool { - match t { +pub fn safe_to_replace_ty(t: &ast::ty_, _tm: test_mode) -> bool { + match *t { ast::ty_infer => { false } // always implicit, always top level ast::ty_bot => { false } // in source, can only appear // as the out type of a function @@ -204,7 +204,7 @@ pub fn replace_expr_in_crate(crate: ast::crate, i: uint, ast::crate { let j: @mut uint = @mut 0u; fn fold_expr_rep(j_: @mut uint, i_: uint, newexpr_: ast::expr_, - original: ast::expr_, fld: fold::ast_fold, + original: &ast::expr_, fld: fold::ast_fold, tm_: test_mode) -> ast::expr_ { *j_ += 1u; @@ -221,7 +221,7 @@ pub fn replace_expr_in_crate(crate: ast::crate, i: uint, .. 
*fold::default_ast_fold() }; let af = fold::make_fold(afp); - let crate2: @ast::crate = @af.fold_crate(crate); + let crate2: @ast::crate = @af.fold_crate(&crate); *crate2 } @@ -231,7 +231,7 @@ pub fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty, tm: test_mode) -> ast::crate { let j: @mut uint = @mut 0u; fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_, - original: ast::ty_, fld: fold::ast_fold, + original: &ast::ty_, fld: fold::ast_fold, tm_: test_mode) -> ast::ty_ { *j_ += 1u; @@ -244,7 +244,7 @@ pub fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty, .. *fold::default_ast_fold() }; let af = fold::make_fold(afp); - let crate2: @ast::crate = @af.fold_crate(crate); + let crate2: @ast::crate = @af.fold_crate(&crate); *crate2 } diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 45bbe5d091e06..61bb03dd7f902 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -396,7 +396,7 @@ pub fn pretty_print_input(sess: Session, +cfg: ast::crate_cfg, input: input, pprust::node_block(s, ref blk) => { pp::space(s.s); pprust::synth_comment( - s, ~"block " + int::to_str((*blk).node.id)); + s, ~"block " + int::to_str(blk.node.id)); } pprust::node_expr(s, expr) => { pp::space(s.s); diff --git a/src/librustc/front/config.rs b/src/librustc/front/config.rs index afb73a6e7c35c..faa1ad9854af7 100644 --- a/src/librustc/front/config.rs +++ b/src/librustc/front/config.rs @@ -45,7 +45,7 @@ pub fn strip_items(crate: @ast::crate, in_cfg: in_cfg_pred) .. *fold::default_ast_fold()}; let fold = fold::make_fold(precursor); - let res = @fold.fold_crate(*crate); + let res = @fold.fold_crate(&*crate); return res; } @@ -63,7 +63,7 @@ fn filter_view_item(cx: @Context, &&view_item: @ast::view_item } } -fn fold_mod(cx: @Context, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod { +fn fold_mod(cx: @Context, m: &ast::_mod, fld: fold::ast_fold) -> ast::_mod { let filtered_items = m.items.filter_mapped(|a| filter_item(cx, *a)); let filtered_view_items = @@ -83,7 +83,7 @@ fn filter_foreign_item(cx: @Context, &&item: @ast::foreign_item) -> fn fold_foreign_mod( cx: @Context, - nm: ast::foreign_mod, + nm: &ast::foreign_mod, fld: fold::ast_fold ) -> ast::foreign_mod { let filtered_items = @@ -98,21 +98,21 @@ fn fold_foreign_mod( } } -fn fold_item_underscore(cx: @Context, +item: ast::item_, +fn fold_item_underscore(cx: @Context, item: &ast::item_, fld: fold::ast_fold) -> ast::item_ { - let item = match item { - ast::item_impl(a, b, c, methods) => { + let item = match *item { + ast::item_impl(ref a, b, c, ref methods) => { let methods = methods.filtered(|m| method_in_cfg(cx, *m) ); - ast::item_impl(a, b, c, methods) + ast::item_impl(/*bad*/ copy *a, b, c, methods) } ast::item_trait(ref a, ref b, ref methods) => { let methods = methods.filtered(|m| trait_method_in_cfg(cx, m) ); ast::item_trait(/*bad*/copy *a, /*bad*/copy *b, methods) } - item => item + ref item => /*bad*/ copy *item }; - fold::noop_fold_item_underscore(item, fld) + fold::noop_fold_item_underscore(&item, fld) } fn filter_stmt(cx: @Context, &&stmt: @ast::stmt) -> @@ -134,7 +134,7 @@ fn filter_stmt(cx: @Context, &&stmt: @ast::stmt) -> fn fold_block( cx: @Context, - b: ast::blk_, + b: &ast::blk_, fld: fold::ast_fold ) -> ast::blk_ { let filtered_stmts = diff --git a/src/librustc/front/core_inject.rs b/src/librustc/front/core_inject.rs index 62a2dfd880b46..b7df99795924b 100644 --- a/src/librustc/front/core_inject.rs +++ b/src/librustc/front/core_inject.rs @@ -66,10 +66,13 @@ fn 
inject_libcore_ref(sess: Session, view_items: vis, ../*bad*/copy crate.module }; - new_module = fld.fold_mod(new_module); + new_module = fld.fold_mod(&new_module); // FIXME #2543: Bad copy. - let new_crate = ast::crate_ { module: new_module, ..copy crate }; + let new_crate = ast::crate_ { + module: new_module, + ..copy *crate + }; (new_crate, span) }, fold_mod: |module, fld| { @@ -95,12 +98,15 @@ fn inject_libcore_ref(sess: Session, let vis = vec::append(~[vi2], module.view_items); // FIXME #2543: Bad copy. - let new_module = ast::_mod { view_items: vis, ..copy module }; - fold::noop_fold_mod(new_module, fld) + let new_module = ast::_mod { + view_items: vis, + ..copy *module + }; + fold::noop_fold_mod(&new_module, fld) }, ..*fold::default_ast_fold() }; let fold = fold::make_fold(precursor); - @fold.fold_crate(*crate) + @fold.fold_crate(crate) } diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs index f19b52661f210..4e845d156f462 100644 --- a/src/librustc/front/test.rs +++ b/src/librustc/front/test.rs @@ -91,7 +91,7 @@ fn generate_test_harness(sess: session::Session, fold_mod: |a,b| fold_mod(cx, a, b),.. *fold::default_ast_fold()}; let fold = fold::make_fold(precursor); - let res = @fold.fold_crate(*crate); + let res = @fold.fold_crate(&*crate); cx.ext_cx.bt_pop(); return res; } @@ -106,7 +106,7 @@ fn strip_test_functions(crate: @ast::crate) -> @ast::crate { } fn fold_mod(cx: @mut TestCtxt, - m: ast::_mod, + m: &ast::_mod, fld: fold::ast_fold) -> ast::_mod { // Remove any #[main] from the AST so it doesn't clash with @@ -125,19 +125,21 @@ fn fold_mod(cx: @mut TestCtxt, items: vec::map(m.items, |i| nomain(cx, *i)), }; - fold::noop_fold_mod(mod_nomain, fld) + fold::noop_fold_mod(&mod_nomain, fld) } fn fold_crate(cx: @mut TestCtxt, - c: ast::crate_, + c: &ast::crate_, fld: fold::ast_fold) -> ast::crate_ { let folded = fold::noop_fold_crate(c, fld); // Add a special __test module to the crate that will contain code // generated for the test harness - ast::crate_ { module: add_test_module(cx, /*bad*/copy folded.module), - .. folded } + ast::crate_ { + module: add_test_module(cx, &folded.module), + .. folded + } } @@ -238,11 +240,11 @@ fn should_fail(i: @ast::item) -> bool { vec::len(attr::find_attrs_by_name(i.attrs, ~"should_fail")) > 0u } -fn add_test_module(cx: &TestCtxt, +m: ast::_mod) -> ast::_mod { +fn add_test_module(cx: &TestCtxt, m: &ast::_mod) -> ast::_mod { let testmod = mk_test_module(cx); ast::_mod { items: vec::append_one(/*bad*/copy m.items, testmod), - .. m + .. /*bad*/ copy *m } } diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index c862a15dbc7a4..8324721c33a79 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -323,7 +323,7 @@ fn encode_path(ecx: @EncodeContext, ebml_w: writer::Encoder, } fn encode_info_for_mod(ecx: @EncodeContext, ebml_w: writer::Encoder, - md: _mod, id: node_id, path: &[ast_map::path_elt], + md: &_mod, id: node_id, path: &[ast_map::path_elt], name: ident) { ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(id)); @@ -621,7 +621,7 @@ fn encode_info_for_item(ecx: @EncodeContext, ebml_w: writer::Encoder, } ebml_w.end_tag(); } - item_mod(m) => { + item_mod(ref m) => { add_to_index(); encode_info_for_mod(ecx, ebml_w, m, item.id, path, item.ident); } @@ -856,8 +856,8 @@ fn encode_info_for_item(ecx: @EncodeContext, ebml_w: writer::Encoder, // method info, we output static methods with type signatures as // written. Here, we output the *real* type signatures. 
I feel like // maybe we should only ever handle the real type signatures. - for vec::each((*ms)) |m| { - let ty_m = ast_util::trait_method_to_ty_method(*m); + for ms.each |m| { + let ty_m = ast_util::trait_method_to_ty_method(m); if ty_m.self_ty.node != ast::sty_static { loop; } index.push(entry { val: ty_m.id, pos: ebml_w.writer.tell() }); @@ -927,7 +927,7 @@ fn encode_info_for_items(ecx: @EncodeContext, ebml_w: writer::Encoder, let index = @mut ~[]; ebml_w.start_tag(tag_items_data); index.push(entry { val: crate_node_id, pos: ebml_w.writer.tell() }); - encode_info_for_mod(ecx, ebml_w, crate.node.module, + encode_info_for_mod(ecx, ebml_w, &crate.node.module, crate_node_id, ~[], syntax::parse::token::special_idents::invalid); visit::visit_crate(*crate, (), visit::mk_vt(@visit::Visitor { diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 0b1abd683b122..2658e5c467403 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -298,7 +298,7 @@ fn encode_ast(ebml_w: writer::Encoder, item: ast::inlined_item) { // nested items, as otherwise it would get confused when translating // inlined items. fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { - fn drop_nested_items(blk: ast::blk_, fld: fold::ast_fold) -> ast::blk_ { + fn drop_nested_items(blk: &ast::blk_, fld: fold::ast_fold) -> ast::blk_ { let stmts_sans_items = do blk.stmts.filtered |stmt| { match stmt.node { ast::stmt_expr(_, _) | ast::stmt_semi(_, _) | @@ -317,7 +317,7 @@ fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { id: blk.id, rules: blk.rules }; - fold::noop_fold_block(blk_sans_items, fld) + fold::noop_fold_block(&blk_sans_items, fld) } let fld = fold::make_fold(@fold::AstFoldFns { @@ -336,7 +336,7 @@ fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { ast::ii_foreign(fld.fold_foreign_item(i)) } ast::ii_dtor(ref dtor, nm, ref tps, parent_id) => { - let dtor_body = fld.fold_block((*dtor).node.body); + let dtor_body = fld.fold_block(&dtor.node.body); ast::ii_dtor( codemap::spanned { node: ast::struct_dtor_ { body: dtor_body, @@ -372,7 +372,7 @@ fn renumber_ast(xcx: @ExtendedDecodeContext, ii: ast::inlined_item) ast::ii_foreign(fld.fold_foreign_item(i)) } ast::ii_dtor(ref dtor, nm, ref tps, parent_id) => { - let dtor_body = fld.fold_block((*dtor).node.body); + let dtor_body = fld.fold_block(&dtor.node.body); let dtor_attrs = fld.fold_attributes(/*bad*/copy (*dtor).node.attrs); let new_params = fold::fold_ty_params(/*bad*/copy *tps, fld); let dtor_id = fld.new_id((*dtor).node.id); diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index 0c043dac2167f..6efcc0b97c0ee 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -580,8 +580,8 @@ impl CheckLoanCtxt { } fn check_loans_in_fn(fk: &visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, + decl: &ast::fn_decl, + body: &ast::blk, sp: span, id: ast::node_id, &&self: @mut CheckLoanCtxt, @@ -753,7 +753,7 @@ fn check_loans_in_expr(expr: @ast::expr, visit::visit_expr(expr, self, vt); } -fn check_loans_in_block(blk: ast::blk, +fn check_loans_in_block(blk: &ast::blk, &&self: @mut CheckLoanCtxt, vt: visit::vt<@mut CheckLoanCtxt>) { do save_and_restore_managed(self.declared_purity) { diff --git a/src/librustc/middle/borrowck/gather_loans.rs b/src/librustc/middle/borrowck/gather_loans.rs index 1b9c935cf7501..eea11fa06754f 100644 --- a/src/librustc/middle/borrowck/gather_loans.rs +++ 
b/src/librustc/middle/borrowck/gather_loans.rs @@ -96,8 +96,8 @@ pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> ReqMaps { } fn req_loans_in_fn(fk: &visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, + decl: &ast::fn_decl, + body: &ast::blk, sp: span, id: ast::node_id, &&self: @mut GatherLoanCtxt, @@ -269,13 +269,13 @@ fn req_loans_in_expr(ex: @ast::expr, (vt.visit_expr)(cond, self, vt); // during body, can only root for the body - self.root_ub = (*body).node.id; - (vt.visit_block)((*body), self, vt); + self.root_ub = body.node.id; + (vt.visit_block)(body, self, vt); } // see explanation attached to the `root_ub` field: ast::expr_loop(ref body, _) => { - self.root_ub = (*body).node.id; + self.root_ub = body.node.id; visit::visit_expr(ex, self, vt); } diff --git a/src/librustc/middle/check_loop.rs b/src/librustc/middle/check_loop.rs index 34665fe7b7d27..15f64b0fa2f0c 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -33,10 +33,10 @@ pub fn check_crate(tcx: ty::ctxt, crate: @crate) { match e.node { expr_while(e, ref b) => { (v.visit_expr)(e, cx, v); - (v.visit_block)((*b), Context { in_loop: true,.. cx }, v); + (v.visit_block)(b, Context { in_loop: true,.. cx }, v); } expr_loop(ref b, _) => { - (v.visit_block)((*b), Context { in_loop: true,.. cx }, v); + (v.visit_block)(b, Context { in_loop: true,.. cx }, v); } expr_fn(*) => { visit::visit_expr(e, Context { @@ -45,7 +45,7 @@ pub fn check_crate(tcx: ty::ctxt, crate: @crate) { }, v); } expr_fn_block(_, ref b) => { - (v.visit_block)((*b), Context { + (v.visit_block)(b, Context { in_loop: false, can_ret: false }, v); @@ -53,10 +53,10 @@ pub fn check_crate(tcx: ty::ctxt, crate: @crate) { expr_loop_body(@expr {node: expr_fn_block(_, ref b), _}) => { let sigil = ty::ty_closure_sigil(ty::expr_ty(tcx, e)); let blk = (sigil == BorrowedSigil); - (v.visit_block)((*b), Context { + (v.visit_block)(b, Context { in_loop: true, can_ret: blk - }, v); + }, v); } expr_break(_) => { if !cx.in_loop { diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index ce44ef9c07175..8666ae0fa448a 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -670,8 +670,8 @@ pub fn check_local(cx: @MatchCheckCtxt, pub fn check_fn(cx: @MatchCheckCtxt, kind: &visit::fn_kind, - decl: fn_decl, - body: blk, + decl: &fn_decl, + body: &blk, sp: span, id: node_id, &&s: (), diff --git a/src/librustc/middle/freevars.rs b/src/librustc/middle/freevars.rs index 81c4b1ae70c99..f69e0e2d454de 100644 --- a/src/librustc/middle/freevars.rs +++ b/src/librustc/middle/freevars.rs @@ -39,7 +39,7 @@ pub type freevar_map = HashMap; // Since we want to be able to collect upvars in some arbitrary piece // of the AST, we take a walker function that we invoke with a visitor // in order to start the search. 
-fn collect_freevars(def_map: resolve::DefMap, blk: ast::blk) +fn collect_freevars(def_map: resolve::DefMap, blk: &ast::blk) -> freevar_info { let seen = HashMap(); let refs = @mut ~[]; @@ -100,8 +100,8 @@ pub fn annotate_freevars(def_map: resolve::DefMap, crate: @ast::crate) -> freevar_map { let freevars = HashMap(); - let walk_fn = fn@(_fk: &visit::fn_kind, _decl: ast::fn_decl, - blk: ast::blk, _sp: span, nid: ast::node_id) { + let walk_fn = fn@(_fk: &visit::fn_kind, _decl: &ast::fn_decl, + blk: &ast::blk, _sp: span, nid: ast::node_id) { let vars = collect_freevars(def_map, blk); freevars.insert(nid, vars); }; diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs index 2d87f0419799c..1b1e9afa924d5 100644 --- a/src/librustc/middle/kind.rs +++ b/src/librustc/middle/kind.rs @@ -153,8 +153,14 @@ fn with_appropriate_checker(cx: Context, id: node_id, b: fn(check_fn)) { // Check that the free variables used in a shared/sendable closure conform // to the copy/move kind bounds. Then recursively check the function body. -fn check_fn(fk: &visit::fn_kind, decl: fn_decl, body: blk, sp: span, - fn_id: node_id, cx: Context, v: visit::vt) { +fn check_fn( + fk: &visit::fn_kind, + decl: &fn_decl, + body: &blk, + sp: span, + fn_id: node_id, + cx: Context, + v: visit::vt) { // Check kinds on free variables: do with_appropriate_checker(cx, fn_id) |chk| { @@ -166,7 +172,7 @@ fn check_fn(fk: &visit::fn_kind, decl: fn_decl, body: blk, sp: span, visit::visit_fn(fk, decl, body, sp, fn_id, cx, v); } -fn check_arm(a: arm, cx: Context, v: visit::vt) { +fn check_arm(a: &arm, cx: Context, v: visit::vt) { for vec::each(a.pats) |p| { do pat_util::pat_bindings(cx.tcx.def_map, *p) |mode, id, span, _pth| { if mode == bind_by_copy { diff --git a/src/librustc/middle/lint.rs b/src/librustc/middle/lint.rs index 95be98be77680..c69bba8c25d5c 100644 --- a/src/librustc/middle/lint.rs +++ b/src/librustc/middle/lint.rs @@ -726,7 +726,7 @@ fn check_item_structural_records(cx: ty::ctxt, it: @ast::item) { fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) { fn check_foreign_fn(cx: ty::ctxt, fn_id: ast::node_id, - decl: ast::fn_decl) { + decl: &ast::fn_decl) { let tys = vec::map(decl.inputs, |a| a.ty ); for vec::each(vec::append_one(tys, decl.output)) |ty| { match ty.node { @@ -760,7 +760,7 @@ fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) { either::Right(ast::foreign_abi_rust_intrinsic) => { for nmod.items.each |ni| { match /*bad*/copy ni.node { - ast::foreign_item_fn(decl, _, _) => { + ast::foreign_item_fn(ref decl, _, _) => { check_foreign_fn(cx, it.id, decl); } // FIXME #4622: Not implemented. @@ -923,8 +923,8 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { } } -fn check_fn(tcx: ty::ctxt, fk: &visit::fn_kind, decl: ast::fn_decl, - _body: ast::blk, span: span, id: ast::node_id) { +fn check_fn(tcx: ty::ctxt, fk: &visit::fn_kind, decl: &ast::fn_decl, + _body: &ast::blk, span: span, id: ast::node_id) { debug!("lint check_fn fk=%? 
id=%?", fk, id); // don't complain about blocks, since they tend to get their modes @@ -938,7 +938,7 @@ fn check_fn(tcx: ty::ctxt, fk: &visit::fn_kind, decl: ast::fn_decl, check_fn_deprecated_modes(tcx, fn_ty, decl, span, id); } -fn check_fn_deprecated_modes(tcx: ty::ctxt, fn_ty: ty::t, decl: ast::fn_decl, +fn check_fn_deprecated_modes(tcx: ty::ctxt, fn_ty: ty::t, decl: &ast::fn_decl, span: span, id: ast::node_id) { match ty::get(fn_ty).sty { ty::ty_closure(ty::ClosureTy {sig: ref sig, _}) | @@ -990,7 +990,7 @@ fn check_fn_deprecated_modes(tcx: ty::ctxt, fn_ty: ty::t, decl: ast::fn_decl, ast::ty_closure(@ast::TyClosure{decl: ref d, _}) | ast::ty_bare_fn(@ast::TyBareFn{decl: ref d, _})=>{ check_fn_deprecated_modes(tcx, arg_ty.ty, - *d, span, id); + d, span, id); } ast::ty_path(*) => { // This is probably a typedef, so we can't @@ -1026,7 +1026,7 @@ fn check_item_deprecated_modes(tcx: ty::ctxt, it: @ast::item) { ast::ty_bare_fn(@ast::TyBareFn {decl: ref decl, _}) => { let fn_ty = ty::node_id_to_type(tcx, it.id); check_fn_deprecated_modes( - tcx, fn_ty, *decl, ty.span, it.id) + tcx, fn_ty, decl, ty.span, it.id) } _ => () } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 739882258b2d7..e6c8a3f1aa537 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -436,8 +436,8 @@ impl IrMaps { } fn visit_fn(fk: &visit::fn_kind, - decl: fn_decl, - body: blk, + decl: &fn_decl, + body: &blk, sp: span, id: node_id, &&self: @mut IrMaps, @@ -540,7 +540,7 @@ fn visit_local(local: @local, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) { visit::visit_local(local, self, vt); } -fn visit_arm(arm: arm, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) { +fn visit_arm(arm: &arm, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) { let def_map = self.tcx.def_map; for arm.pats.each |pat| { do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| { @@ -979,7 +979,7 @@ impl Liveness { // _______________________________________________________________________ - fn compute(&self, decl: fn_decl, body: blk) -> LiveNode { + fn compute(&self, decl: &fn_decl, body: &blk) -> LiveNode { // if there is a `break` or `again` at the top level, then it's // effectively a return---this only occurs in `for` loops, // where the body is really a closure. @@ -1004,7 +1004,7 @@ impl Liveness { entry_ln } - fn propagate_through_fn_block(&self, decl: fn_decl, blk: blk) + fn propagate_through_fn_block(&self, decl: &fn_decl, blk: &blk) -> LiveNode { // inputs passed by & mode should be considered live on exit: for decl.inputs.each |arg| { @@ -1038,7 +1038,7 @@ impl Liveness { self.propagate_through_block(blk, self.s.fallthrough_ln) } - fn propagate_through_block(&self, blk: blk, succ: LiveNode) -> LiveNode { + fn propagate_through_block(&self, blk: &blk, succ: LiveNode) -> LiveNode { let succ = self.propagate_through_opt_expr(blk.node.expr, succ); do blk.node.stmts.foldr(succ) |stmt, succ| { self.propagate_through_stmt(*stmt, succ) @@ -1134,7 +1134,7 @@ impl Liveness { The next-node for a break is the successor of the entire loop. The next-node for a continue is the top of this loop. 
*/ - self.with_loop_nodes((*blk).node.id, succ, + self.with_loop_nodes(blk.node.id, succ, self.live_node(expr.id, expr.span), || { // the construction of a closure itself is not important, @@ -1164,7 +1164,7 @@ impl Liveness { // ( succ ) // let else_ln = self.propagate_through_opt_expr(els, succ); - let then_ln = self.propagate_through_block((*then), succ); + let then_ln = self.propagate_through_block(then, succ); let ln = self.live_node(expr.id, expr.span); self.init_from_succ(ln, else_ln); self.merge_from_succ(ln, then_ln, false); @@ -1172,13 +1172,13 @@ impl Liveness { } expr_while(cond, ref blk) => { - self.propagate_through_loop(expr, Some(cond), (*blk), succ) + self.propagate_through_loop(expr, Some(cond), blk, succ) } // Note that labels have been resolved, so we don't need to look // at the label ident expr_loop(ref blk, _) => { - self.propagate_through_loop(expr, None, (*blk), succ) + self.propagate_through_loop(expr, None, blk, succ) } expr_match(e, ref arms) => { @@ -1199,9 +1199,9 @@ impl Liveness { let ln = self.live_node(expr.id, expr.span); self.init_empty(ln, succ); let mut first_merge = true; - for (*arms).each |arm| { + for arms.each |arm| { let body_succ = - self.propagate_through_block(arm.body, succ); + self.propagate_through_block(&arm.body, succ); let guard_succ = self.propagate_through_opt_expr(arm.guard, body_succ); let arm_succ = @@ -1362,7 +1362,7 @@ impl Liveness { } expr_block(ref blk) => { - self.propagate_through_block((*blk), succ) + self.propagate_through_block(blk, succ) } expr_mac(*) => { @@ -1463,7 +1463,7 @@ impl Liveness { fn propagate_through_loop(&self, expr: @expr, cond: Option<@expr>, - body: blk, + body: &blk, succ: LiveNode) -> LiveNode { /* @@ -1568,7 +1568,7 @@ fn check_local(local: @local, &&self: @Liveness, vt: vt<@Liveness>) { visit::visit_local(local, self, vt); } -fn check_arm(arm: arm, &&self: @Liveness, vt: vt<@Liveness>) { +fn check_arm(arm: &arm, &&self: @Liveness, vt: vt<@Liveness>) { do self.arm_pats_bindings(arm.pats) |ln, var, sp| { self.warn_about_unused(sp, ln, var); } @@ -1639,8 +1639,8 @@ fn check_expr(expr: @expr, &&self: @Liveness, vt: vt<@Liveness>) { } } -fn check_fn(_fk: &visit::fn_kind, _decl: fn_decl, - _body: blk, _sp: span, _id: node_id, +fn check_fn(_fk: &visit::fn_kind, _decl: &fn_decl, + _body: &blk, _sp: span, _id: node_id, &&_self: @Liveness, _v: vt<@Liveness>) { // do not check contents of nested fns } @@ -1885,7 +1885,7 @@ impl @Liveness { if name[0] == ('_' as u8) { None } else { Some(name) } } - fn warn_about_unused_args(&self, decl: fn_decl, entry_ln: LiveNode) { + fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) { for decl.inputs.each |arg| { do pat_util::pat_bindings(self.tcx.def_map, arg.pat) |_bm, p_id, sp, _n| { diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 7782a58356b30..74f485d08af49 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -221,7 +221,7 @@ pub fn record_parent(cx: ctxt, child_id: ast::node_id) { } } -pub fn resolve_block(blk: ast::blk, cx: ctxt, visitor: visit::vt) { +pub fn resolve_block(blk: &ast::blk, cx: ctxt, visitor: visit::vt) { // Record the parent of this block. 
record_parent(cx, blk.node.id); @@ -230,7 +230,7 @@ pub fn resolve_block(blk: ast::blk, cx: ctxt, visitor: visit::vt) { visit::visit_block(blk, new_cx, visitor); } -pub fn resolve_arm(arm: ast::arm, cx: ctxt, visitor: visit::vt) { +pub fn resolve_arm(arm: &ast::arm, cx: ctxt, visitor: visit::vt) { visit::visit_arm(arm, cx, visitor); } @@ -317,8 +317,8 @@ pub fn resolve_item(item: @ast::item, cx: ctxt, visitor: visit::vt) { } pub fn resolve_fn(fk: &visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, + decl: &ast::fn_decl, + body: &ast::blk, sp: span, id: ast::node_id, cx: ctxt, @@ -608,8 +608,8 @@ pub fn determine_rp_in_item(item: @ast::item, } pub fn determine_rp_in_fn(fk: &visit::fn_kind, - decl: ast::fn_decl, - body: ast::blk, + decl: &ast::fn_decl, + body: &ast::blk, _: span, _: ast::node_id, &&cx: @mut DetermineRpCtxt, @@ -626,7 +626,7 @@ pub fn determine_rp_in_fn(fk: &visit::fn_kind, } } -pub fn determine_rp_in_ty_method(ty_m: ast::ty_method, +pub fn determine_rp_in_ty_method(ty_m: &ast::ty_method, &&cx: @mut DetermineRpCtxt, visitor: visit::vt<@mut DetermineRpCtxt>) { do cx.with(cx.item_id, false) { diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index afcc0298a95d6..cff6603cf0532 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -1027,7 +1027,7 @@ pub impl Resolver { } } - fn block_needs_anonymous_module(@mut self, block: blk) -> bool { + fn block_needs_anonymous_module(@mut self, block: &blk) -> bool { // If the block has view items, we need an anonymous module. if block.node.view_items.len() > 0 { return true; @@ -1276,7 +1276,7 @@ pub impl Resolver { // methods, so check that first. let mut has_static_methods = false; for (*methods).each |method| { - let ty_m = trait_method_to_ty_method(*method); + let ty_m = trait_method_to_ty_method(method); match ty_m.self_ty.node { sty_static => { has_static_methods = true; @@ -1304,7 +1304,7 @@ pub impl Resolver { // Add the names of all the methods to the trait info. let method_names = @HashMap(); for (*methods).each |method| { - let ty_m = trait_method_to_ty_method(*method); + let ty_m = trait_method_to_ty_method(method); let ident = ty_m.ident; // Add it to the trait info if not static, @@ -1534,7 +1534,7 @@ pub impl Resolver { } fn build_reduced_graph_for_block(@mut self, - block: blk, + block: &blk, parent: ReducedGraphParent, &&visitor: vt) { let mut new_parent; @@ -3788,7 +3788,7 @@ pub impl Resolver { item.id, 0, OpaqueFunctionRibKind), - (*block), + block, NoSelfBinding, visitor); } @@ -3866,7 +3866,7 @@ pub impl Resolver { rib_kind: RibKind, optional_declaration: Option<@fn_decl>, type_parameters: TypeParameters, - block: blk, + block: &blk, self_binding: SelfBinding, visitor: ResolveVisitor) { // Create a value rib for the function. 
@@ -3982,7 +3982,7 @@ pub impl Resolver { self.resolve_function(NormalRibKind, None, NoTypeParameters, - (*destructor).node.body, + &destructor.node.body, HasSelfBinding ((*destructor).node.self_id, true), @@ -4015,7 +4015,7 @@ pub impl Resolver { self.resolve_function(rib_kind, Some(@/*bad*/copy method.decl), type_parameters, - method.body, + &method.body, self_binding, visitor); } @@ -4140,7 +4140,7 @@ pub impl Resolver { return result; } - fn check_consistent_bindings(@mut self, arm: arm) { + fn check_consistent_bindings(@mut self, arm: &arm) { if arm.pats.len() == 0 { return; } let map_0 = self.binding_mode_map(arm.pats[0]); for arm.pats.eachi() |i, p| { @@ -4179,7 +4179,7 @@ pub impl Resolver { } } - fn resolve_arm(@mut self, arm: arm, visitor: ResolveVisitor) { + fn resolve_arm(@mut self, arm: &arm, visitor: ResolveVisitor) { (*self.value_ribs).push(@Rib(NormalRibKind)); let bindings_list = HashMap(); @@ -4193,12 +4193,12 @@ pub impl Resolver { self.check_consistent_bindings(arm); visit_expr_opt(arm.guard, (), visitor); - self.resolve_block(arm.body, visitor); + self.resolve_block(&arm.body, visitor); (*self.value_ribs).pop(); } - fn resolve_block(@mut self, block: blk, visitor: ResolveVisitor) { + fn resolve_block(@mut self, block: &blk, visitor: ResolveVisitor) { debug!("(resolving block) entering block"); (*self.value_ribs).push(@Rib(NormalRibKind)); @@ -4910,7 +4910,7 @@ pub impl Resolver { self.resolve_function(FunctionRibKind(expr.id, block.node.id), Some(@/*bad*/copy *fn_decl), NoTypeParameters, - (*block), + block, NoSelfBinding, visitor); } diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 9723e2cda26b8..d289621645928 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -2093,7 +2093,7 @@ pub fn trans_item(ccx: @CrateContext, item: ast::item) { vec::append(/*bad*/copy *path, ~[path_name(item.ident)]), decl, body, llfndecl, no_self, None, item.id, None); } else { - for vec::each((*body).node.stmts) |stmt| { + for body.node.stmts.each |stmt| { match stmt.node { ast::stmt_decl(@codemap::spanned { node: ast::decl_item(i), _ }, _) => { @@ -2108,7 +2108,7 @@ pub fn trans_item(ccx: @CrateContext, item: ast::item) { meth::trans_impl(ccx, /*bad*/copy *path, item.ident, ms, tps, None, item.id); } - ast::item_mod(m) => { + ast::item_mod(ref m) => { trans_mod(ccx, m); } ast::item_enum(ref enum_definition, ref tps) => { @@ -2121,11 +2121,10 @@ pub fn trans_item(ccx: @CrateContext, item: ast::item) { } } ast::item_const(_, expr) => consts::trans_const(ccx, expr, item.id), - ast::item_foreign_mod(foreign_mod) => { + ast::item_foreign_mod(ref foreign_mod) => { let abi = match attr::foreign_abi(item.attrs) { - either::Right(abi_) => abi_, - either::Left(ref msg) => ccx.sess.span_fatal(item.span, - /*bad*/copy *msg) + Right(abi_) => abi_, + Left(ref msg) => ccx.sess.span_fatal(item.span, /*bad*/copy *msg) }; foreign::trans_foreign_mod(ccx, foreign_mod, abi); } @@ -2165,9 +2164,9 @@ pub fn trans_struct_def(ccx: @CrateContext, struct_def: @ast::struct_def, // separate modules in the compiled program. That's because modules exist // only as a convenience for humans working with the code, to organize names // and control visibility. 
-pub fn trans_mod(ccx: @CrateContext, m: ast::_mod) { +pub fn trans_mod(ccx: @CrateContext, m: &ast::_mod) { let _icx = ccx.insn_ctxt("trans_mod"); - for vec::each(m.items) |item| { + for m.items.each |item| { trans_item(ccx, **item); } } @@ -3130,7 +3129,7 @@ pub fn trans_crate(sess: session::Session, { let _icx = ccx.insn_ctxt("text"); - trans_mod(ccx, crate.node.module); + trans_mod(ccx, &crate.node.module); } decl_gc_metadata(ccx, llmod_id); diff --git a/src/librustc/middle/trans/callee.rs b/src/librustc/middle/trans/callee.rs index c1dac70ae97d9..3782bb9c3fdce 100644 --- a/src/librustc/middle/trans/callee.rs +++ b/src/librustc/middle/trans/callee.rs @@ -390,7 +390,7 @@ pub fn trans_rtcall_or_lang_call_with_type_params(bcx: block, ArgVals(args), dest, DontAutorefArg); } -pub fn body_contains_ret(body: ast::blk) -> bool { +pub fn body_contains_ret(body: &ast::blk) -> bool { let cx = @mut false; visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { visit_item: |_i, _cx, _v| { }, @@ -424,7 +424,7 @@ pub fn trans_call_inner( ast::expr_loop_body(@ast::expr { node: ast::expr_fn_block(_, ref body), _ - }) => body_contains_ret((*body)), + }) => body_contains_ret(body), _ => false } } diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs index 3d1d70abefda1..b0a2539b81eb5 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -202,7 +202,7 @@ fn build_wrap_fn_(ccx: @CrateContext, // function itself is unnecessary). We used to do this, in fact, and will // perhaps do so in the future. pub fn trans_foreign_mod(ccx: @CrateContext, - foreign_mod: ast::foreign_mod, + foreign_mod: &ast::foreign_mod, abi: ast::foreign_abi) { let _icx = ccx.insn_ctxt("foreign::trans_foreign_mod"); diff --git a/src/librustc/middle/trans/meth.rs b/src/librustc/middle/trans/meth.rs index 02ea29f915d69..825dcbb44d204 100644 --- a/src/librustc/middle/trans/meth.rs +++ b/src/librustc/middle/trans/meth.rs @@ -310,7 +310,7 @@ pub fn trans_static_method_callee(bcx: block, let mname = if method_id.crate == ast::local_crate { match bcx.tcx().items.get(&method_id.node) { ast_map::node_trait_method(trait_method, _, _) => { - ast_util::trait_method_to_ty_method(*trait_method).ident + ast_util::trait_method_to_ty_method(trait_method).ident } _ => fail!(~"callee is not a trait method") } diff --git a/src/librustc/middle/trans/reachable.rs b/src/librustc/middle/trans/reachable.rs index b917021add224..ac2abcac1e276 100644 --- a/src/librustc/middle/trans/reachable.rs +++ b/src/librustc/middle/trans/reachable.rs @@ -109,7 +109,7 @@ fn traverse_public_item(cx: ctx, item: @item) { item_fn(_, _, ref tps, ref blk) => { if tps.len() > 0u || attr::find_inline_attr(item.attrs) != attr::ia_none { - traverse_inline_body(cx, (*blk)); + traverse_inline_body(cx, blk); } } item_impl(tps, _, _, ms) => { @@ -117,7 +117,7 @@ fn traverse_public_item(cx: ctx, item: @item) { if tps.len() > 0u || m.tps.len() > 0u || attr::find_inline_attr(m.attrs) != attr::ia_none { cx.rmap.insert(m.id, ()); - traverse_inline_body(cx, m.body); + traverse_inline_body(cx, &m.body); } } } @@ -129,7 +129,7 @@ fn traverse_public_item(cx: ctx, item: @item) { cx.rmap.insert(dtor.node.id, ()); if tps.len() > 0u || attr::find_inline_attr(dtor.node.attrs) != attr::ia_none { - traverse_inline_body(cx, dtor.node.body); + traverse_inline_body(cx, &dtor.node.body); } } } @@ -168,7 +168,7 @@ fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt) { } } -fn traverse_inline_body(cx: ctx, body: blk) { +fn 
traverse_inline_body(cx: ctx, body: &blk) { fn traverse_expr(e: @expr, cx: ctx, v: visit::vt) { match e.node { expr_path(_) => { @@ -217,7 +217,7 @@ fn traverse_inline_body(cx: ctx, body: blk) { fn traverse_item(i: @item, cx: ctx, _v: visit::vt) { traverse_public_item(cx, i); } - visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { + visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { visit_expr: traverse_expr, visit_item: traverse_item, ..*visit::default_visitor() diff --git a/src/librustc/middle/trans/type_use.rs b/src/librustc/middle/trans/type_use.rs index 3013f928b75da..47762ce45741a 100644 --- a/src/librustc/middle/trans/type_use.rs +++ b/src/librustc/middle/trans/type_use.rs @@ -103,7 +103,7 @@ pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint) ast_map::node_item(@ast::item { node: item_fn(_, _, _, ref body), _ }, _) | ast_map::node_method(@ast::method {body: ref body, _}, _, _) => { - handle_body(cx, (*body)); + handle_body(cx, body); } ast_map::node_trait_method(*) => { // This will be a static trait method. For now, we just assume @@ -159,8 +159,8 @@ pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint) for uint::range(0u, n_tps) |n| { cx.uses[n] |= flags;} } } - ast_map::node_dtor(_, dtor, _, _) => { - handle_body(cx, dtor.node.body); + ast_map::node_dtor(_, ref dtor, _, _) => { + handle_body(cx, &dtor.node.body); } ast_map::node_struct_ctor(*) => { // Similarly to node_variant, this monomorphized function just uses @@ -359,7 +359,7 @@ pub fn mark_for_expr(cx: Context, e: @expr) { } } -pub fn handle_body(cx: Context, body: blk) { +pub fn handle_body(cx: Context, body: &blk) { let v = visit::mk_vt(@visit::Visitor { visit_expr: |e, cx, v| { visit::visit_expr(e, cx, v); diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 239e86623cabd..3dedd169c8566 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -3850,7 +3850,7 @@ pub fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path { ast_map::path_name(method.ident)) } ast_map::node_trait_method(trait_method, _, path) => { - let method = ast_util::trait_method_to_ty_method(*trait_method); + let method = ast_util::trait_method_to_ty_method(&*trait_method); vec::append_one(/*bad*/copy *path, ast_map::path_name(method.ident)) } diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index 9269752b8ecf4..c026cc4845c4e 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -332,14 +332,14 @@ pub fn ast_ty_to_ty( }; ty::mk_rec(tcx, flds) } - ast::ty_bare_fn(bf) => { + ast::ty_bare_fn(ref bf) => { ty::mk_bare_fn(tcx, ty_of_bare_fn(self, rscope, bf.purity, - bf.abi, bf.decl)) + bf.abi, &bf.decl)) } - ast::ty_closure(f) => { + ast::ty_closure(ref f) => { let fn_decl = ty_of_closure(self, rscope, f.sigil, f.purity, f.onceness, - f.region, f.decl, None, + f.region, &f.decl, None, ast_ty.span); ty::mk_closure(tcx, fn_decl) } @@ -473,7 +473,7 @@ pub fn ty_of_bare_fn( rscope: RS, purity: ast::purity, abi: ast::Abi, - decl: ast::fn_decl) + decl: &ast::fn_decl) -> ty::BareFnTy { debug!("ty_of_fn_decl"); @@ -501,7 +501,7 @@ pub fn ty_of_closure( purity: ast::purity, onceness: ast::Onceness, opt_region: Option<@ast::region>, - decl: ast::fn_decl, + decl: &ast::fn_decl, expected_tys: Option, span: span) -> ty::ClosureTy { diff --git a/src/librustc/middle/typeck/check/_match.rs b/src/librustc/middle/typeck/check/_match.rs index 82f9828db3fb3..8c39beac6bb3d 100644 --- 
a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -58,7 +58,7 @@ pub fn check_match(fcx: @mut FnCtxt, Some(e) => { check_expr_has_type(fcx, e, ty::mk_bool(tcx)); }, None => () } - if !check_block(fcx, arm.body) { arm_non_bot = true; } + if !check_block(fcx, &arm.body) { arm_non_bot = true; } let bty = fcx.node_ty(arm.body.node.id); demand::suptype(fcx, arm.body.span, result_ty, bty); } diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 9d14030d2617a..309a4a9b0fabe 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -244,7 +244,7 @@ pub fn check_item_types(ccx: @mut CrateCtxt, crate: @ast::crate) { pub fn check_bare_fn(ccx: @mut CrateCtxt, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, id: ast::node_id, self_info: Option) { let fty = ty::node_id_to_type(ccx.tcx, id); @@ -264,7 +264,7 @@ pub fn check_fn(ccx: @mut CrateCtxt, sigil: Option, fn_sig: &ty::FnSig, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, fn_kind: FnKind, old_fcx: Option<@mut FnCtxt>) { let tcx = ccx.tcx; @@ -381,7 +381,7 @@ pub fn check_fn(ccx: @mut CrateCtxt, fn gather_locals(fcx: @mut FnCtxt, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, arg_tys: &[ty::t], self_info: Option) { let tcx = fcx.ccx.tcx; @@ -459,7 +459,7 @@ pub fn check_fn(ccx: @mut CrateCtxt, visit::visit_pat(p, e, v); }; - let visit_block = fn@(b: ast::blk, &&e: (), v: visit::vt<()>) { + let visit_block = fn@(b: &ast::blk, &&e: (), v: visit::vt<()>) { // non-obvious: the `blk` variable maps to region lb, so // we have to keep this up-to-date. This // is... unfortunate. It'd be nice to not need this. @@ -469,8 +469,8 @@ pub fn check_fn(ccx: @mut CrateCtxt, }; // Don't descend into fns and items - fn visit_fn(_fk: &visit::fn_kind, _decl: ast::fn_decl, - _body: ast::blk, _sp: span, + fn visit_fn(_fk: &visit::fn_kind, _decl: &ast::fn_decl, + _body: &ast::blk, _sp: span, _id: ast::node_id, &&_t: (), _v: visit::vt<()>) { } fn visit_item(_i: @ast::item, &&_e: (), _v: visit::vt<()>) { } @@ -497,7 +497,13 @@ pub fn check_method(ccx: @mut CrateCtxt, def_id: self_impl_def_id, explicit_self: method.self_ty }; - check_bare_fn(ccx, &method.decl, method.body, method.id, Some(self_info)); + check_bare_fn( + ccx, + &method.decl, + &method.body, + method.id, + Some(self_info) + ); } pub fn check_no_duplicate_fields(tcx: ty::ctxt, @@ -541,9 +547,13 @@ pub fn check_struct(ccx: @mut CrateCtxt, }; // typecheck the dtor let dtor_dec = ast_util::dtor_dec(); - check_bare_fn(ccx, &dtor_dec, - dtor.node.body, dtor.node.id, - Some(class_t)); + check_bare_fn( + ccx, + &dtor_dec, + &dtor.node.body, + dtor.node.id, + Some(class_t) + ); }; // Check that the class is instantiable @@ -565,7 +575,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) { it.id); } ast::item_fn(ref decl, _, _, ref body) => { - check_bare_fn(ccx, decl, (*body), it.id, None); + check_bare_fn(ccx, decl, body, it.id, None); } ast::item_impl(_, _, ty, ms) => { let rp = ccx.tcx.region_paramd_items.find(&it.id); @@ -1397,7 +1407,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, fn check_for(fcx: @mut FnCtxt, local: @ast::local, element_ty: ty::t, - body: ast::blk, + body: &ast::blk, node_id: ast::node_id) -> bool { let local_ty = fcx.local_ty(local.span, local.node.id); @@ -1411,7 +1421,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, // A generic function for checking the then and else in an if // or if-check fn 
check_then_else(fcx: @mut FnCtxt, - thn: ast::blk, + thn: &ast::blk, elsopt: Option<@ast::expr>, id: ast::node_id, _sp: span) @@ -1609,7 +1619,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, expr: @ast::expr, ast_sigil_opt: Option, decl: &ast::fn_decl, - body: ast::blk, + body: &ast::blk, fn_kind: FnKind, expected: Option) { let tcx = fcx.ccx.tcx; @@ -1653,7 +1663,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, let mut fn_ty = astconv::ty_of_closure( fcx, fcx, sigil, purity, expected_onceness, - None, *decl, expected_tys, expr.span); + None, decl, expected_tys, expr.span); let fty = ty::mk_closure(tcx, copy fn_ty); @@ -2094,7 +2104,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, // message because of the indirect_ret_ty. let fn_kind = if err_happened {Vanilla} else {ForLoop}; check_expr_fn(fcx, loop_body, None, - decl, *body, fn_kind, Some(inner_ty)); + decl, body, fn_kind, Some(inner_ty)); demand::suptype(fcx, loop_body.span, inner_ty, fcx.expr_ty(loop_body)); } @@ -2341,28 +2351,28 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, } ast::expr_if(cond, ref thn, elsopt) => { bot = check_expr_has_type(fcx, cond, ty::mk_bool(tcx)); - bot |= check_then_else(fcx, *thn, elsopt, id, expr.span); + bot |= check_then_else(fcx, thn, elsopt, id, expr.span); } ast::expr_while(cond, ref body) => { bot = check_expr_has_type(fcx, cond, ty::mk_bool(tcx)); - check_block_no_value(fcx, (*body)); + check_block_no_value(fcx, body); fcx.write_ty(id, ty::mk_nil(tcx)); } ast::expr_loop(ref body, _) => { - check_block_no_value(fcx, (*body)); + check_block_no_value(fcx, body); fcx.write_ty(id, ty::mk_nil(tcx)); - bot = !may_break(tcx, expr.id, (*body)); + bot = !may_break(tcx, expr.id, body); } ast::expr_match(discrim, ref arms) => { bot = _match::check_match(fcx, expr, discrim, (/*bad*/copy *arms)); } ast::expr_fn(sigil, ref decl, ref body, _) => { check_expr_fn(fcx, expr, Some(sigil), - decl, (*body), Vanilla, expected); + decl, body, Vanilla, expected); } ast::expr_fn_block(ref decl, ref body) => { check_expr_fn(fcx, expr, None, - decl, (*body), Vanilla, expected); + decl, body, Vanilla, expected); } ast::expr_loop_body(loop_body) => { check_loop_body(fcx, expr, expected, loop_body); @@ -2392,7 +2402,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, match b.node { ast::expr_fn_block(ref decl, ref body) => { check_expr_fn(fcx, b, None, - decl, *body, DoBlock, Some(inner_ty)); + decl, body, DoBlock, Some(inner_ty)); demand::suptype(fcx, b.span, inner_ty, fcx.expr_ty(b)); } // argh @@ -2402,7 +2412,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt, } ast::expr_block(ref b) => { // If this is an unchecked block, turn off purity-checking - bot = check_block_with_expected(fcx, *b, expected); + bot = check_block_with_expected(fcx, b, expected); let typ = match b.node.expr { Some(expr) => fcx.expr_ty(expr), @@ -2692,7 +2702,7 @@ pub fn check_stmt(fcx: @mut FnCtxt, stmt: @ast::stmt) -> bool { return bot; } -pub fn check_block_no_value(fcx: @mut FnCtxt, blk: ast::blk) -> bool { +pub fn check_block_no_value(fcx: @mut FnCtxt, blk: &ast::blk) -> bool { let bot = check_block(fcx, blk); if !bot { let blkty = fcx.node_ty(blk.node.id); @@ -2702,12 +2712,12 @@ pub fn check_block_no_value(fcx: @mut FnCtxt, blk: ast::blk) -> bool { return bot; } -pub fn check_block(fcx0: @mut FnCtxt, blk: ast::blk) -> bool { +pub fn check_block(fcx0: @mut FnCtxt, blk: &ast::blk) -> bool { check_block_with_expected(fcx0, blk, None) } pub fn check_block_with_expected(fcx0: @mut FnCtxt, - blk: ast::blk, + blk: &ast::blk, 
expected: Option) -> bool { let fcx = match blk.node.rules { @@ -3102,7 +3112,7 @@ pub fn ast_expr_vstore_to_vstore(fcx: @mut FnCtxt, } // Returns true if b contains a break that can exit from b -pub fn may_break(cx: ty::ctxt, id: ast::node_id, b: ast::blk) -> bool { +pub fn may_break(cx: ty::ctxt, id: ast::node_id, b: &ast::blk) -> bool { // First: is there an unlabeled break immediately // inside the loop? (loop_query(b, |e| { diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index c91607489ecec..bf6ddc4f5421e 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -125,7 +125,7 @@ pub fn regionck_expr(fcx: @mut FnCtxt, e: @ast::expr) { fcx.infcx().resolve_regions(); } -pub fn regionck_fn(fcx: @mut FnCtxt, blk: ast::blk) { +pub fn regionck_fn(fcx: @mut FnCtxt, blk: &ast::blk) { let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 }; let v = regionck_visitor(); (v.visit_block)(blk, rcx, v); @@ -176,7 +176,7 @@ pub fn visit_local(l: @ast::local, &&rcx: @mut Rcx, v: rvt) { } } -pub fn visit_block(b: ast::blk, &&rcx: @mut Rcx, v: rvt) { +pub fn visit_block(b: &ast::blk, &&rcx: @mut Rcx, v: rvt) { visit::visit_block(b, rcx, v); } diff --git a/src/librustc/middle/typeck/check/vtable.rs b/src/librustc/middle/typeck/check/vtable.rs index e6b837fa9492a..c8cd7c05fe864 100644 --- a/src/librustc/middle/typeck/check/vtable.rs +++ b/src/librustc/middle/typeck/check/vtable.rs @@ -731,7 +731,7 @@ pub fn resolve_expr(ex: @ast::expr, // Detect points where a trait-bounded type parameter is // instantiated, resolve the impls for the parameters. -pub fn resolve_in_block(fcx: @mut FnCtxt, bl: ast::blk) { +pub fn resolve_in_block(fcx: @mut FnCtxt, bl: &ast::blk) { visit::visit_block(bl, fcx, visit::mk_vt(@visit::Visitor { visit_expr: resolve_expr, visit_item: |_,_,_| {}, diff --git a/src/librustc/middle/typeck/check/writeback.rs b/src/librustc/middle/typeck/check/writeback.rs index a16dab4bdeaef..7fb896a902ca1 100644 --- a/src/librustc/middle/typeck/check/writeback.rs +++ b/src/librustc/middle/typeck/check/writeback.rs @@ -200,7 +200,7 @@ fn visit_expr(e: @ast::expr, &&wbcx: @mut WbCtxt, v: wb_vt) { } visit::visit_expr(e, wbcx, v); } -fn visit_block(b: ast::blk, &&wbcx: @mut WbCtxt, v: wb_vt) { +fn visit_block(b: &ast::blk, &&wbcx: @mut WbCtxt, v: wb_vt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, b.span, b.node.id); visit::visit_block(b, wbcx, v); @@ -260,7 +260,7 @@ pub fn resolve_type_vars_in_expr(fcx: @mut FnCtxt, e: @ast::expr) -> bool { pub fn resolve_type_vars_in_fn(fcx: @mut FnCtxt, decl: &ast::fn_decl, - blk: ast::blk, + blk: &ast::blk, self_info: Option) -> bool { let wbcx = @mut WbCtxt { fcx: fcx, success: true }; let visit = mk_visitor(); diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index 96c76b52181c1..0c44c20b25e99 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -228,7 +228,7 @@ pub fn ensure_trait_methods(ccx: @mut CrateCtxt, } fn make_static_method_ty(ccx: @mut CrateCtxt, - am: ast::ty_method, + am: &ast::ty_method, rp: Option, m: ty::method, // Take this as an argument b/c we may check @@ -289,10 +289,11 @@ pub fn ensure_trait_methods(ccx: @mut CrateCtxt, } let trait_bounds = ty_param_bounds(ccx, *params); - let ty_m = trait_method_to_ty_method(*m); - let method_ty = ty_of_ty_method(ccx, ty_m, region_paramd, def_id); + let ty_m = trait_method_to_ty_method(m); + let 
method_ty = ty_of_ty_method(ccx, &ty_m, region_paramd, + def_id); if ty_m.self_ty.node == ast::sty_static { - make_static_method_ty(ccx, ty_m, region_paramd, + make_static_method_ty(ccx, &ty_m, region_paramd, method_ty, trait_ty, trait_bounds); } @@ -689,7 +690,7 @@ pub fn convert_struct(ccx: @mut CrateCtxt, astconv::ty_of_bare_fn( ccx, type_rscope(rp), ast::impure_fn, ast::RustAbi, - ast_util::dtor_dec())); + &ast_util::dtor_dec())); write_ty_to_tcx(tcx, dtor.node.id, t_dtor); tcx.tcache.insert(local_def(dtor.node.id), ty_param_bounds_and_ty { @@ -748,7 +749,7 @@ pub fn ty_of_method(ccx: @mut CrateCtxt, ident: m.ident, tps: ty_param_bounds(ccx, m.tps), fty: astconv::ty_of_bare_fn(ccx, type_rscope(rp), m.purity, - ast::RustAbi, m.decl), + ast::RustAbi, &m.decl), self_ty: m.self_ty.node, vis: m.vis, def_id: local_def(m.id) @@ -756,14 +757,14 @@ pub fn ty_of_method(ccx: @mut CrateCtxt, } pub fn ty_of_ty_method(self: @mut CrateCtxt, - m: ast::ty_method, + m: &ast::ty_method, rp: Option, id: ast::def_id) -> ty::method { ty::method { ident: m.ident, tps: ty_param_bounds(self, m.tps), fty: astconv::ty_of_bare_fn(self, type_rscope(rp), m.purity, - ast::RustAbi, m.decl), + ast::RustAbi, &m.decl), // assume public, because this is only invoked on trait methods self_ty: m.self_ty.node, vis: ast::public, @@ -819,7 +820,7 @@ pub fn ty_of_item(ccx: @mut CrateCtxt, it: @ast::item) ast::item_fn(decl, purity, tps, _) => { let bounds = ty_param_bounds(ccx, tps); let tofd = astconv::ty_of_bare_fn(ccx, empty_rscope, purity, - ast::RustAbi, decl); + ast::RustAbi, &decl); let tpt = ty_param_bounds_and_ty { bounds: bounds, region_param: None, @@ -903,17 +904,17 @@ pub fn ty_of_item(ccx: @mut CrateCtxt, it: @ast::item) pub fn ty_of_foreign_item(ccx: @mut CrateCtxt, it: @ast::foreign_item) -> ty::ty_param_bounds_and_ty { match /*bad*/copy it.node { - ast::foreign_item_fn(fn_decl, _, params) => { - return ty_of_foreign_fn_decl(ccx, fn_decl, params, local_def(it.id)); - } - ast::foreign_item_const(t) => { - let rb = in_binding_rscope(empty_rscope); - return ty::ty_param_bounds_and_ty { - bounds: @~[], - region_param: None, - ty: ast_ty_to_ty(ccx, rb, t) - }; - } + ast::foreign_item_fn(ref fn_decl, _, ref params) => { + ty_of_foreign_fn_decl(ccx, fn_decl, *params, local_def(it.id)) + } + ast::foreign_item_const(t) => { + let rb = in_binding_rscope(empty_rscope); + ty::ty_param_bounds_and_ty { + bounds: @~[], + region_param: None, + ty: ast_ty_to_ty(ccx, rb, t) + } + } } } @@ -958,7 +959,7 @@ pub fn compute_bounds(ccx: @mut CrateCtxt, } pub fn ty_param_bounds(ccx: @mut CrateCtxt, - params: ~[ast::ty_param]) + params: &[ast::ty_param]) -> @~[ty::param_bounds] { @do params.map |param| { match ccx.tcx.ty_param_bounds.find(¶m.id) { @@ -973,8 +974,8 @@ pub fn ty_param_bounds(ccx: @mut CrateCtxt, } pub fn ty_of_foreign_fn_decl(ccx: @mut CrateCtxt, - decl: ast::fn_decl, - +ty_params: ~[ast::ty_param], + decl: &ast::fn_decl, + ty_params: &[ast::ty_param], def_id: ast::def_id) -> ty::ty_param_bounds_and_ty { let bounds = ty_param_bounds(ccx, ty_params); diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index 7b980b9de0db6..59fe85cf173cd 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -56,7 +56,7 @@ pub fn field_exprs(fields: ~[ast::field]) -> ~[@ast::expr] { // Takes a predicate p, returns true iff p is true for any subexpressions // of b -- skipping any inner loops (loop, while, loop_body) -pub fn loop_query(b: ast::blk, p: fn@(ast::expr_) -> bool) -> bool { +pub fn 
loop_query(b: &ast::blk, p: fn@(ast::expr_) -> bool) -> bool { let rs = @mut false; let visit_expr: @fn(@ast::expr, &&flag: @mut bool, @@ -79,7 +79,7 @@ pub fn loop_query(b: ast::blk, p: fn@(ast::expr_) -> bool) -> bool { // Takes a predicate p, returns true iff p is true for any subexpressions // of b -- skipping any inner loops (loop, while, loop_body) -pub fn block_query(b: ast::blk, p: fn@(@ast::expr) -> bool) -> bool { +pub fn block_query(b: &ast::blk, p: fn@(@ast::expr) -> bool) -> bool { let rs = @mut false; let visit_expr: @fn(@ast::expr, &&flag: @mut bool, diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 8b0c84cff8343..f262b2219e9c9 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -70,7 +70,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region) re_scope(node_id) => { match cx.items.find(&node_id) { Some(ast_map::node_block(ref blk)) => { - explain_span(cx, "block", (*blk).span) + explain_span(cx, "block", blk.span) } Some(ast_map::node_expr(expr)) => { match expr.node { @@ -108,7 +108,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region) match cx.items.find(&id) { Some(ast_map::node_block(ref blk)) => { - let (msg, opt_span) = explain_span(cx, "block", (*blk).span); + let (msg, opt_span) = explain_span(cx, "block", blk.span); (fmt!("%s %s", prefix, msg), opt_span) } Some(_) | None => { @@ -159,7 +159,7 @@ pub fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { match cx.items.find(&node_id) { Some(ast_map::node_block(ref blk)) => { fmt!("", - cx.sess.codemap.span_to_str((*blk).span)) + cx.sess.codemap.span_to_str(blk.span)) } Some(ast_map::node_expr(expr)) => { match expr.node { diff --git a/src/librustpkg/util.rs b/src/librustpkg/util.rs index 64a6d9c50554a..fdabe86359a6b 100644 --- a/src/librustpkg/util.rs +++ b/src/librustpkg/util.rs @@ -76,7 +76,7 @@ struct ReadyCtx { fns: ~[ListenerFn] } -fn fold_mod(_ctx: @mut ReadyCtx, m: ast::_mod, +fn fold_mod(_ctx: @mut ReadyCtx, m: &ast::_mod, fold: fold::ast_fold) -> ast::_mod { fn strip_main(item: @ast::item) -> @ast::item { @ast::item { @@ -87,11 +87,11 @@ fn fold_mod(_ctx: @mut ReadyCtx, m: ast::_mod, } } - fold::noop_fold_mod(ast::_mod { - items: do vec::map(m.items) |item| { + fold::noop_fold_mod(&ast::_mod { + items: do m.items.map |item| { strip_main(*item) }, - .. m + .. 
copy *m }, fold) } @@ -199,7 +199,7 @@ pub fn ready_crate(sess: session::Session, let fold = fold::make_fold(precursor); - @fold.fold_crate(*crate) + @fold.fold_crate(crate) } pub fn parse_vers(vers: ~str) -> result::Result { diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 8386f3c0498c3..b56dfeffd7a77 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -186,8 +186,8 @@ pub fn map_decoded_item(diag: span_handler, pub fn map_fn( fk: &visit::fn_kind, - decl: fn_decl, - body: blk, + decl: &fn_decl, + body: &blk, sp: codemap::span, id: node_id, &&cx: @mut Ctx, @@ -205,7 +205,7 @@ pub fn map_fn( id: id, attrs: /* FIXME (#2543) */ vec::from_slice(*attrs), self_id: self_id, - body: /* FIXME (#2543) */ copy body, + body: /* FIXME (#2543) */ copy *body, }, span: sp, }; @@ -222,8 +222,8 @@ pub fn map_fn( visit::visit_fn(fk, decl, body, sp, id, cx, v); } -pub fn map_block(b: blk, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { - cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy b)); +pub fn map_block(b: &blk, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { + cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy *b)); visit::visit_block(b, cx, v); } @@ -244,7 +244,7 @@ pub fn map_local(loc: @local, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { visit::visit_local(loc, cx, v); } -pub fn map_arm(arm: arm, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { +pub fn map_arm(arm: &arm, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { number_pat(cx, arm.pats[0]); visit::visit_arm(arm, cx, v); } @@ -308,7 +308,7 @@ pub fn map_item(i: @item, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { cx.map.insert(p.ref_id, node_item(i, item_path)); } for methods.each |tm| { - let id = ast_util::trait_method_to_ty_method(*tm).id; + let id = ast_util::trait_method_to_ty_method(tm).id; let d_id = ast_util::local_def(i.id); cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); } @@ -387,14 +387,14 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { fmt!("method %s in %s (id=%?)", *itr.get(m.ident), path_to_str(*path, itr), id) } - Some(node_trait_method(tm, _, path)) => { - let m = ast_util::trait_method_to_ty_method(*tm); + Some(node_trait_method(ref tm, _, path)) => { + let m = ast_util::trait_method_to_ty_method(&**tm); fmt!("method %s in %s (id=%?)", *itr.get(m.ident), path_to_str(*path, itr), id) } Some(node_variant(ref variant, _, path)) => { fmt!("variant %s in %s (id=%?)", - *itr.get((*variant).node.name), path_to_str(*path, itr), id) + *itr.get(variant.node.name), path_to_str(*path, itr), id) } Some(node_expr(expr)) => { fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 3830c81f08417..7bf08aaaf76fc 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -260,9 +260,9 @@ pub fn public_methods(ms: ~[@method]) -> ~[@method] { // extract a ty_method from a trait_method. 
if the trait_method is // a default, pull out the useful fields to make a ty_method -pub fn trait_method_to_ty_method(method: trait_method) -> ty_method { - match method { - required(ref m) => (*m), +pub fn trait_method_to_ty_method(method: &trait_method) -> ty_method { + match *method { + required(ref m) => copy *m, provided(ref m) => { ty_method { ident: m.ident, @@ -278,7 +278,7 @@ pub fn trait_method_to_ty_method(method: trait_method) -> ty_method { } } -pub fn split_trait_methods(trait_methods: ~[trait_method]) +pub fn split_trait_methods(trait_methods: &[trait_method]) -> (~[ty_method], ~[@method]) { let mut reqd = ~[], provd = ~[]; for trait_methods.each |trt_method| { @@ -425,7 +425,7 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { vfn(l.node.id); }, - visit_block: fn@(b: blk) { + visit_block: fn@(b: &blk) { vfn(b.node.id); }, @@ -433,7 +433,7 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { vfn(ast_util::stmt_id(*s)); }, - visit_arm: fn@(_a: arm) { }, + visit_arm: fn@(_a: &arm) { }, visit_pat: fn@(p: @pat) { vfn(p.id) @@ -463,8 +463,8 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { } }, - visit_fn: fn@(fk: &visit::fn_kind, d: ast::fn_decl, - _b: ast::blk, _sp: span, id: ast::node_id) { + visit_fn: fn@(fk: &visit::fn_kind, d: &ast::fn_decl, + _b: &ast::blk, _sp: span, id: ast::node_id) { vfn(id); match *fk { @@ -491,10 +491,10 @@ pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { } }, - visit_ty_method: fn@(_ty_m: ty_method) { + visit_ty_method: fn@(_ty_m: &ty_method) { }, - visit_trait_method: fn@(_ty_m: trait_method) { + visit_trait_method: fn@(_ty_m: &trait_method) { }, visit_struct_def: fn@(_sd: @struct_def, _id: ident, _tps: &[ty_param], diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index dad1c2a7feafe..5b2d6f7600b54 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -183,28 +183,28 @@ pub fn contains(haystack: &[@ast::meta_item], } fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { - return match a.node { - ast::meta_word(ref na) => match b.node { + match a.node { + ast::meta_word(ref na) => match b.node { ast::meta_word(ref nb) => (*na) == (*nb), _ => false - }, - ast::meta_name_value(ref na, va) => match b.node { + }, + ast::meta_name_value(ref na, va) => match b.node { ast::meta_name_value(ref nb, vb) => { (*na) == (*nb) && va.node == vb.node } _ => false - }, - ast::meta_list(ref na, misa) => match b.node { - ast::meta_list(ref nb, misb) => { + }, + ast::meta_list(ref na, ref misa) => match b.node { + ast::meta_list(ref nb, ref misb) => { if na != nb { return false; } - for misa.each |&mi| { - if !contains(misb, mi) { return false; } + for misa.each |mi| { + if !misb.contains(mi) { return false; } } true } _ => false - } } + } } pub fn contains_name(metas: &[@ast::meta_item], name: &str) -> bool { @@ -260,21 +260,23 @@ pub fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: &str) /* Higher-level applications */ -pub fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] { +pub fn sort_meta_items(items: &[@ast::meta_item]) -> ~[@ast::meta_item] { // This is sort of stupid here, converting to a vec of mutables and back - let mut v = items; + let mut v = vec::from_slice(items); do std::sort::quick_sort(v) |ma, mb| { get_meta_item_name(*ma) <= get_meta_item_name(*mb) } // There doesn't seem to be a more optimal way to do this - do v.map |&m| { + do v.map |m| { match m.node { - ast::meta_list(n, mis) => @spanned { - node: ast::meta_list(n, sort_meta_items(mis)), - .. 
*m - }, - _ => m + ast::meta_list(n, ref mis) => { + @spanned { + node: ast::meta_list(n, sort_meta_items(*mis)), + .. /*bad*/ copy **m + } + } + _ => /*bad*/ copy *m } } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 0b2aaa89d9b3a..d26b566ecf67a 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -24,63 +24,71 @@ use core::vec; use std::oldmap::HashMap; pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt, - e: expr_, s: span, fld: ast_fold, - orig: fn@(expr_, span, ast_fold) -> (expr_, span)) + e: &expr_, s: span, fld: ast_fold, + orig: fn@(&expr_, span, ast_fold) -> (expr_, span)) -> (expr_, span) { - return match e { - // expr_mac should really be expr_ext or something; it's the - // entry-point for all syntax extensions. - expr_mac(ref mac) => { - + match *e { + // expr_mac should really be expr_ext or something; it's the + // entry-point for all syntax extensions. + expr_mac(ref mac) => { match (*mac).node { - - // Token-tree macros, these will be the only case when we're - // finished transitioning. - mac_invoc_tt(pth, ref tts) => { - assert (vec::len(pth.idents) == 1u); - /* using idents and token::special_idents would make the - the macro names be hygienic */ - let extname = cx.parse_sess().interner.get(pth.idents[0]); - match exts.find(&extname) { - None => { - cx.span_fatal(pth.span, - fmt!("macro undefined: '%s'", *extname)) - } - Some(NormalTT(SyntaxExpanderTT{expander: exp, - span: exp_sp})) => { - cx.bt_push(ExpandedFrom(CallInfo{ - call_site: s, - callee: NameAndSpan { - name: *extname, span: exp_sp + // Token-tree macros, these will be the only case when we're + // finished transitioning. + mac_invoc_tt(pth, ref tts) => { + assert (vec::len(pth.idents) == 1u); + /* using idents and token::special_idents would make the + the macro names be hygienic */ + let extname = cx.parse_sess().interner.get(pth.idents[0]); + match exts.find(&extname) { + None => { + cx.span_fatal( + pth.span, + fmt!("macro undefined: '%s'", *extname)) } - })); - - let expanded = match exp(cx, (*mac).span, (*tts)) { - MRExpr(e) => e, - MRAny(expr_maker,_,_) => expr_maker(), - _ => cx.span_fatal( - pth.span, fmt!("non-expr macro in expr pos: %s", - *extname)) - }; - - //keep going, outside-in - let fully_expanded = fld.fold_expr(expanded).node; - cx.bt_pop(); - - (fully_expanded, s) - } - _ => { - cx.span_fatal(pth.span, - fmt!("'%s' is not a tt-style macro", - *extname)) - } - + Some(NormalTT(SyntaxExpanderTT{ + expander: exp, + span: exp_sp + })) => { + cx.bt_push(ExpandedFrom(CallInfo { + call_site: s, + callee: NameAndSpan { + name: *extname, + span: exp_sp, + }, + })); + + let expanded = match exp(cx, mac.span, *tts) { + MRExpr(e) => e, + MRAny(expr_maker,_,_) => expr_maker(), + _ => { + cx.span_fatal( + pth.span, + fmt!( + "non-expr macro in expr pos: %s", + *extname + ) + ) + } + }; + + //keep going, outside-in + let fully_expanded = fld.fold_expr(expanded).node; + cx.bt_pop(); + + (fully_expanded, s) + } + _ => { + cx.span_fatal( + pth.span, + fmt!("'%s' is not a tt-style macro", *extname) + ) + } + } } - } } - } - _ => orig(e, s, fld) - }; + } + _ => orig(e, s, fld) + } } // This is a secondary mechanism for invoking syntax extensions on items: @@ -93,8 +101,8 @@ pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt, // NB: there is some redundancy between this and expand_item, below, and // they might benefit from some amount of semantic and language-UI merger. 
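The expand.rs hunks above and below rework the item-level expansion entry points: expand_expr now receives the expression node by reference, and expand_mod_items takes the module as &ast::_mod, folds its contents first, and then lets item-level extensions contribute additional items. As a rough, illustrative sketch of that shape only (modern Rust with invented Item/Module/ItemExtension types, not the libsyntax AST or the SyntaxExtensions table in this patch):

    // Sketch of a module-level expansion pass: fold the existing items,
    // then append whatever each registered item extension generates.
    #[derive(Clone, Debug)]
    struct Item {
        name: String,
    }

    #[derive(Clone, Debug)]
    struct Module {
        items: Vec<Item>,
    }

    // An item-level extension: given an existing item, it may produce
    // extra generated items (e.g. a derived impl).
    type ItemExtension = fn(&Item) -> Vec<Item>;

    // The module is borrowed; only the returned module is newly built.
    fn expand_module(module: &Module, exts: &[ItemExtension]) -> Module {
        let mut items = Vec::new();
        for item in &module.items {
            // Keep the original item...
            items.push(item.clone());
            // ...then add anything each extension derives from it.
            for ext in exts {
                items.extend(ext(item));
            }
        }
        Module { items }
    }

    fn main() {
        let m = Module { items: vec![Item { name: "Point".into() }] };
        let derive_encode: ItemExtension =
            |item| vec![Item { name: format!("{}_encode_impl", item.name) }];
        let expanded = expand_module(&m, &[derive_encode]);
        println!("{:?}", expanded.items);
    }

Here expand_module plays the role the hedged sketch assigns to expand_mod_items: it never consumes the input module, so callers can keep folding or inspecting it afterwards.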
pub fn expand_mod_items(exts: SyntaxExtensions, cx: ext_ctxt, - module_: ast::_mod, fld: ast_fold, - orig: fn@(ast::_mod, ast_fold) -> ast::_mod) + module_: &ast::_mod, fld: ast_fold, + orig: fn@(&ast::_mod, ast_fold) -> ast::_mod) -> ast::_mod { // Fold the contents first: let module_ = orig(module_, fld); @@ -130,8 +138,8 @@ pub fn expand_mod_items(exts: SyntaxExtensions, cx: ext_ctxt, // When we enter a module, record it, for the sake of `module!` pub fn expand_item(exts: SyntaxExtensions, - cx: ext_ctxt, &&it: @ast::item, fld: ast_fold, - orig: fn@(&&v: @ast::item, ast_fold) -> Option<@ast::item>) + cx: ext_ctxt, it: @ast::item, fld: ast_fold, + orig: fn@(@ast::item, ast_fold) -> Option<@ast::item>) -> Option<@ast::item> { let is_mod = match it.node { ast::item_mod(_) | ast::item_foreign_mod(_) => true, @@ -223,13 +231,13 @@ pub fn expand_item_mac(exts: SyntaxExtensions, } pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt, - && s: stmt_, sp: span, fld: ast_fold, - orig: fn@(&&s: stmt_, span, ast_fold) -> (stmt_, span)) + s: &stmt_, sp: span, fld: ast_fold, + orig: fn@(s: &stmt_, span, ast_fold) -> (stmt_, span)) -> (stmt_, span) { - let (mac, pth, tts, semi) = match s { + let (mac, pth, tts, semi) = match *s { stmt_mac(ref mac, semi) => { - match (*mac).node { + match mac.node { mac_invoc_tt(pth, ref tts) => ((*mac), pth, (*tts), semi) } } @@ -361,8 +369,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess, // as it registers all the core macros as expanders. f.fold_expr(cm); - let res = @f.fold_crate(*c); - return res; + @f.fold_crate(&*c) } // Local Variables: // mode: rust diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index eaf29d40c3b78..e74376afb0831 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -18,27 +18,27 @@ use core::option; use core::vec; pub trait ast_fold { - fn fold_crate(crate) -> crate; - fn fold_view_item(&&v: @view_item) -> @view_item; - fn fold_foreign_item(&&v: @foreign_item) -> @foreign_item; - fn fold_item(&&v: @item) -> Option<@item>; - fn fold_struct_field(&&v: @struct_field) -> @struct_field; - fn fold_item_underscore(item_) -> item_; - fn fold_method(&&v: @method) -> @method; - fn fold_block(blk) -> blk; - fn fold_stmt(&&v: @stmt) -> @stmt; - fn fold_arm(arm) -> arm; - fn fold_pat(&&v: @pat) -> @pat; - fn fold_decl(&&v: @decl) -> @decl; - fn fold_expr(&&v: @expr) -> @expr; - fn fold_ty(&&v: @Ty) -> @Ty; - fn fold_mod(_mod) -> _mod; - fn fold_foreign_mod(foreign_mod) -> foreign_mod; - fn fold_variant(variant) -> variant; - fn fold_ident(&&v: ident) -> ident; - fn fold_path(&&v: @path) -> @path; - fn fold_local(&&v: @local) -> @local; - fn map_exprs(fn@(&&v: @expr) -> @expr, ~[@expr]) -> ~[@expr]; + fn fold_crate(&crate) -> crate; + fn fold_view_item(@view_item) -> @view_item; + fn fold_foreign_item(@foreign_item) -> @foreign_item; + fn fold_item(@item) -> Option<@item>; + fn fold_struct_field(@struct_field) -> @struct_field; + fn fold_item_underscore(&item_) -> item_; + fn fold_method(@method) -> @method; + fn fold_block(&blk) -> blk; + fn fold_stmt(&stmt) -> @stmt; + fn fold_arm(&arm) -> arm; + fn fold_pat(@pat) -> @pat; + fn fold_decl(@decl) -> @decl; + fn fold_expr(@expr) -> @expr; + fn fold_ty(@Ty) -> @Ty; + fn fold_mod(&_mod) -> _mod; + fn fold_foreign_mod(&foreign_mod) -> foreign_mod; + fn fold_variant(&variant) -> variant; + fn fold_ident(ident) -> ident; + fn fold_path(@path) -> @path; + fn fold_local(@local) -> @local; + fn map_exprs(fn@(@expr) -> @expr, &[@expr]) -> ~[@expr]; fn new_id(node_id) -> node_id; 
fn new_span(span) -> span; } @@ -47,27 +47,27 @@ pub trait ast_fold { pub struct AstFoldFns { //unlike the others, item_ is non-trivial - fold_crate: fn@(crate_, span, ast_fold) -> (crate_, span), + fold_crate: fn@(&crate_, span, ast_fold) -> (crate_, span), fold_view_item: fn@(view_item_, ast_fold) -> view_item_, - fold_foreign_item: fn@(&&v: @foreign_item, ast_fold) -> @foreign_item, - fold_item: fn@(&&v: @item, ast_fold) -> Option<@item>, - fold_struct_field: fn@(&&v: @struct_field, ast_fold) -> @struct_field, - fold_item_underscore: fn@(item_, ast_fold) -> item_, - fold_method: fn@(&&v: @method, ast_fold) -> @method, - fold_block: fn@(blk_, span, ast_fold) -> (blk_, span), - fold_stmt: fn@(stmt_, span, ast_fold) -> (stmt_, span), - fold_arm: fn@(arm, ast_fold) -> arm, - fold_pat: fn@(pat_, span, ast_fold) -> (pat_, span), - fold_decl: fn@(decl_, span, ast_fold) -> (decl_, span), - fold_expr: fn@(expr_, span, ast_fold) -> (expr_, span), - fold_ty: fn@(ty_, span, ast_fold) -> (ty_, span), - fold_mod: fn@(_mod, ast_fold) -> _mod, - fold_foreign_mod: fn@(foreign_mod, ast_fold) -> foreign_mod, - fold_variant: fn@(variant_, span, ast_fold) -> (variant_, span), - fold_ident: fn@(&&v: ident, ast_fold) -> ident, - fold_path: fn@(path, ast_fold) -> path, - fold_local: fn@(local_, span, ast_fold) -> (local_, span), - map_exprs: fn@(fn@(&&v: @expr) -> @expr, ~[@expr]) -> ~[@expr], + fold_foreign_item: fn@(@foreign_item, ast_fold) -> @foreign_item, + fold_item: fn@(@item, ast_fold) -> Option<@item>, + fold_struct_field: fn@(@struct_field, ast_fold) -> @struct_field, + fold_item_underscore: fn@(&item_, ast_fold) -> item_, + fold_method: fn@(@method, ast_fold) -> @method, + fold_block: fn@(&blk_, span, ast_fold) -> (blk_, span), + fold_stmt: fn@(&stmt_, span, ast_fold) -> (stmt_, span), + fold_arm: fn@(&arm, ast_fold) -> arm, + fold_pat: fn@(&pat_, span, ast_fold) -> (pat_, span), + fold_decl: fn@(&decl_, span, ast_fold) -> (decl_, span), + fold_expr: fn@(&expr_, span, ast_fold) -> (expr_, span), + fold_ty: fn@(&ty_, span, ast_fold) -> (ty_, span), + fold_mod: fn@(&_mod, ast_fold) -> _mod, + fold_foreign_mod: fn@(&foreign_mod, ast_fold) -> foreign_mod, + fold_variant: fn@(&variant_, span, ast_fold) -> (variant_, span), + fold_ident: fn@(ident, ast_fold) -> ident, + fold_path: fn@(@path, ast_fold) -> path, + fold_local: fn@(&local_, span, ast_fold) -> (local_, span), + map_exprs: fn@(fn@(@expr) -> @expr, &[@expr]) -> ~[@expr], new_id: fn@(node_id) -> node_id, new_span: fn@(span) -> span } @@ -77,18 +77,20 @@ pub type ast_fold_fns = @AstFoldFns; /* some little folds that probably aren't useful to have in ast_fold itself*/ //used in noop_fold_item and noop_fold_crate and noop_fold_crate_directive -fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item { +fn fold_meta_item_(mi: @meta_item, fld: ast_fold) -> @meta_item { @spanned { node: match mi.node { - meta_word(ref id) => meta_word((*id)), - meta_list(ref id, ref mis) => { + meta_word(id) => meta_word(id), + meta_list(id, ref mis) => { let fold_meta_item = |x| fold_meta_item_(x, fld); - meta_list(/* FIXME: (#2543) */ copy *id, - mis.map(|e| fold_meta_item(*e))) + meta_list( + id, + mis.map(|e| fold_meta_item(*e)) + ) } - meta_name_value(ref id, s) => { - meta_name_value((*id), /* FIXME (#2543) */ copy s) + meta_name_value(id, s) => { + meta_name_value(id, /* FIXME (#2543) */ copy s) } }, span: fld.new_span(mi.span) } @@ -116,13 +118,13 @@ fn fold_arg_(a: arg, fld: ast_fold) -> arg { } //used in noop_fold_expr, and possibly elsewhere in the 
future fn fold_mac_(m: mac, fld: ast_fold) -> mac { - spanned { node: match m.node { - mac_invoc_tt(*) => m.node, - }, - span: fld.new_span(m.span) } + spanned { + node: match m.node { mac_invoc_tt(*) => copy m.node }, + span: fld.new_span(m.span), + } } -pub fn fold_fn_decl(decl: ast::fn_decl, fld: ast_fold) -> ast::fn_decl { +pub fn fold_fn_decl(decl: &ast::fn_decl, fld: ast_fold) -> ast::fn_decl { ast::fn_decl { inputs: decl.inputs.map(|x| fold_arg_(*x, fld)), output: fld.fold_ty(decl.output), @@ -130,29 +132,34 @@ pub fn fold_fn_decl(decl: ast::fn_decl, fld: ast_fold) -> ast::fn_decl { } } -fn fold_ty_param_bound(tpb: ty_param_bound, fld: ast_fold) -> ty_param_bound { - match tpb { +fn fold_ty_param_bound( + tpb: &ty_param_bound, + fld: ast_fold +) -> ty_param_bound { + match *tpb { TraitTyParamBound(ty) => TraitTyParamBound(fld.fold_ty(ty)), RegionTyParamBound => RegionTyParamBound } } -pub fn fold_ty_param(tp: ty_param, fld: ast_fold) -> ty_param { - ast::ty_param { ident: /* FIXME (#2543) */ copy tp.ident, - id: fld.new_id(tp.id), - bounds: @tp.bounds.map(|x| fold_ty_param_bound(*x, fld) )} +pub fn fold_ty_param(tp: &ty_param, fld: ast_fold) -> ty_param { + ast::ty_param { + ident: /* FIXME (#2543) */ copy tp.ident, + id: fld.new_id(tp.id), + bounds: @tp.bounds.map(|x| fold_ty_param_bound(x, fld)), + } } -pub fn fold_ty_params(tps: ~[ty_param], fld: ast_fold) -> ~[ty_param] { - tps.map(|x| fold_ty_param(*x, fld)) +pub fn fold_ty_params(tps: &[ty_param], fld: ast_fold) -> ~[ty_param] { + tps.map(|x| fold_ty_param(x, fld)) } -pub fn noop_fold_crate(c: crate_, fld: ast_fold) -> crate_ { +pub fn noop_fold_crate(c: &crate_, fld: ast_fold) -> crate_ { let fold_meta_item = |x| fold_meta_item_(x, fld); let fold_attribute = |x| fold_attribute_(x, fld); crate_ { - module: fld.fold_mod(c.module), + module: fld.fold_mod(&c.module), attrs: c.attrs.map(|x| fold_attribute(*x)), config: c.config.map(|x| fold_meta_item(*x)), } @@ -163,17 +170,17 @@ fn noop_fold_view_item(vi: view_item_, _fld: ast_fold) -> view_item_ { } -fn noop_fold_foreign_item(&&ni: @foreign_item, fld: ast_fold) +fn noop_fold_foreign_item(ni: @foreign_item, fld: ast_fold) -> @foreign_item { let fold_arg = |x| fold_arg_(x, fld); let fold_attribute = |x| fold_attribute_(x, fld); @ast::foreign_item { ident: fld.fold_ident(ni.ident), - attrs: vec::map(ni.attrs, |x| fold_attribute(*x)), + attrs: ni.attrs.map(|x| fold_attribute(*x)), node: match ni.node { - foreign_item_fn(fdec, purity, typms) => { + foreign_item_fn(ref fdec, purity, ref tps) => { foreign_item_fn( ast::fn_decl { inputs: fdec.inputs.map(|a| fold_arg(*a)), @@ -181,7 +188,7 @@ fn noop_fold_foreign_item(&&ni: @foreign_item, fld: ast_fold) cf: fdec.cf, }, purity, - fold_ty_params(typms, fld)) + fold_ty_params(*tps, fld)) } foreign_item_const(t) => { foreign_item_const(fld.fold_ty(t)) @@ -193,18 +200,18 @@ fn noop_fold_foreign_item(&&ni: @foreign_item, fld: ast_fold) } } -pub fn noop_fold_item(&&i: @item, fld: ast_fold) -> Option<@item> { +pub fn noop_fold_item(i: @item, fld: ast_fold) -> Option<@item> { let fold_attribute = |x| fold_attribute_(x, fld); Some(@ast::item { ident: fld.fold_ident(i.ident), attrs: i.attrs.map(|e| fold_attribute(*e)), id: fld.new_id(i.id), - node: fld.fold_item_underscore(i.node), + node: fld.fold_item_underscore(&i.node), vis: i.vis, span: fld.new_span(i.span) }) } -fn noop_fold_struct_field(&&sf: @struct_field, fld: ast_fold) +fn noop_fold_struct_field(sf: @struct_field, fld: ast_fold) -> @struct_field { @spanned { node: ast::struct_field_ { 
kind: copy sf.node.kind, id: sf.node.id, @@ -212,38 +219,51 @@ fn noop_fold_struct_field(&&sf: @struct_field, fld: ast_fold) span: sf.span } } -pub fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { - match i { - item_const(t, e) => item_const(fld.fold_ty(t), fld.fold_expr(e)), - item_fn(ref decl, purity, ref typms, ref body) => { - item_fn(fold_fn_decl(/* FIXME (#2543) */ copy *decl, fld), - purity, - fold_ty_params(/* FIXME (#2543) */ copy *typms, fld), - fld.fold_block(*body)) - } - item_mod(m) => item_mod(fld.fold_mod(m)), - item_foreign_mod(nm) => item_foreign_mod(fld.fold_foreign_mod(nm)), - item_ty(t, typms) => item_ty(fld.fold_ty(t), - fold_ty_params(typms, fld)), - item_enum(ref enum_definition, ref typms) => { - item_enum(ast::enum_def(ast::enum_def_ { - variants: enum_definition.variants.map( - |x| fld.fold_variant(*x)), - common: enum_definition.common.map( - |x| fold_struct_def(*x, fld)), - }), fold_ty_params(/* FIXME (#2543) */ copy *typms, fld)) - } - item_struct(ref struct_def, ref typms) => { - let struct_def = fold_struct_def( - /* FIXME (#2543) */ copy *struct_def, - fld); - item_struct(struct_def, /* FIXME (#2543) */ copy *typms) +pub fn noop_fold_item_underscore(i: &item_, fld: ast_fold) -> item_ { + match *i { + item_const(t, e) => { + item_const(fld.fold_ty(t), fld.fold_expr(e)) + } + item_fn(ref decl, purity, ref tps, ref body) => { + item_fn( + fold_fn_decl(decl, fld), + purity, + fold_ty_params(*tps, fld), + fld.fold_block(body) + ) + } + item_mod(ref m) => item_mod(fld.fold_mod(m)), + item_foreign_mod(ref nm) => { + item_foreign_mod(fld.fold_foreign_mod(nm)) + } + item_ty(t, ref tps) => { + item_ty(fld.fold_ty(t), fold_ty_params(*tps, fld)) + } + item_enum(ref enum_definition, ref tps) => { + item_enum( + ast::enum_def( + ast::enum_def_ { + variants: do enum_definition.variants.map |x| { + fld.fold_variant(x) + }, + common: do enum_definition.common.map |x| { + fold_struct_def(*x, fld) + } + } + ), + fold_ty_params(*tps, fld)) + } + item_struct(ref struct_def, ref tps) => { + let struct_def = fold_struct_def(*struct_def, fld); + item_struct(struct_def, /* FIXME (#2543) */ copy *tps) } item_impl(ref tps, ifce, ty, ref methods) => { - item_impl(fold_ty_params(/* FIXME (#2543) */ copy *tps, fld), - ifce.map(|p| fold_trait_ref(*p, fld)), - fld.fold_ty(ty), - methods.map(|x| fld.fold_method(*x))) + item_impl( + fold_ty_params(*tps, fld), + ifce.map(|p| fold_trait_ref(*p, fld)), + fld.fold_ty(ty), + methods.map(|x| fld.fold_method(*x)) + ) } item_trait(ref tps, ref traits, ref methods) => { let methods = do methods.map |method| { @@ -252,9 +272,11 @@ pub fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { provided(method) => provided(fld.fold_method(method)) } }; - item_trait(fold_ty_params(/* FIXME (#2543) */ copy *tps, fld), - traits.map(|p| fold_trait_ref(*p, fld)), - methods) + item_trait( + fold_ty_params(*tps, fld), + traits.map(|p| fold_trait_ref(*p, fld)), + methods + ) } item_mac(ref m) => { // FIXME #2888: we might actually want to do something here. @@ -266,12 +288,16 @@ pub fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ { fn fold_struct_def(struct_def: @ast::struct_def, fld: ast_fold) -> @ast::struct_def { let dtor = do option::map(&struct_def.dtor) |dtor| { - let dtor_body = fld.fold_block(dtor.node.body); + let dtor_body = fld.fold_block(&dtor.node.body); let dtor_id = fld.new_id(dtor.node.id); - spanned { node: ast::struct_dtor_ { body: dtor_body, - id: dtor_id, - .. 
dtor.node}, - span: dtor.span } + spanned { + node: ast::struct_dtor_ { + body: dtor_body, + id: dtor_id, + .. copy dtor.node + }, + span: copy dtor.span + } }; @ast::struct_def { fields: struct_def.fields.map(|f| fold_struct_field(*f, fld)), @@ -280,29 +306,33 @@ fn fold_struct_def(struct_def: @ast::struct_def, fld: ast_fold) } } -fn fold_trait_ref(&&p: @trait_ref, fld: ast_fold) -> @trait_ref { +fn fold_trait_ref(p: @trait_ref, fld: ast_fold) -> @trait_ref { @ast::trait_ref { path: fld.fold_path(p.path), ref_id: fld.new_id(p.ref_id), } } -fn fold_struct_field(&&f: @struct_field, fld: ast_fold) -> @struct_field { - @spanned { node: ast::struct_field_ { kind: copy f.node.kind, - id: fld.new_id(f.node.id), - ty: fld.fold_ty(f.node.ty) }, - span: fld.new_span(f.span) } +fn fold_struct_field(f: @struct_field, fld: ast_fold) -> @struct_field { + @spanned { + node: ast::struct_field_ { + kind: copy f.node.kind, + id: fld.new_id(f.node.id), + ty: fld.fold_ty(f.node.ty), + }, + span: fld.new_span(f.span), + } } -fn noop_fold_method(&&m: @method, fld: ast_fold) -> @method { +fn noop_fold_method(m: @method, fld: ast_fold) -> @method { @ast::method { ident: fld.fold_ident(m.ident), attrs: /* FIXME (#2543) */ copy m.attrs, tps: fold_ty_params(m.tps, fld), self_ty: m.self_ty, purity: m.purity, - decl: fold_fn_decl(m.decl, fld), - body: fld.fold_block(m.body), + decl: fold_fn_decl(&m.decl, fld), + body: fld.fold_block(&m.body), id: fld.new_id(m.id), span: fld.new_span(m.span), self_id: fld.new_id(m.self_id), @@ -311,7 +341,7 @@ fn noop_fold_method(&&m: @method, fld: ast_fold) -> @method { } -pub fn noop_fold_block(b: blk_, fld: ast_fold) -> blk_ { +pub fn noop_fold_block(b: &blk_, fld: ast_fold) -> blk_ { ast::blk_ { view_items: b.view_items.map(|x| fld.fold_view_item(*x)), stmts: b.stmts.map(|x| fld.fold_stmt(*x)), @@ -321,38 +351,42 @@ pub fn noop_fold_block(b: blk_, fld: ast_fold) -> blk_ { } } -fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ { +fn noop_fold_stmt(s: &stmt_, fld: ast_fold) -> stmt_ { let fold_mac = |x| fold_mac_(x, fld); - return match s { - stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)), - stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)), - stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)), - stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi) - }; + match *s { + stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)), + stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)), + stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)), + stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi) + } } -fn noop_fold_arm(a: arm, fld: ast_fold) -> arm { +fn noop_fold_arm(a: &arm, fld: ast_fold) -> arm { arm { - pats: vec::map(a.pats, |x| fld.fold_pat(*x)), - guard: option::map(&a.guard, |x| fld.fold_expr(*x)), - body: fld.fold_block(a.body), - } -} - -pub fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ { - return match p { - pat_wild => pat_wild, - pat_ident(binding_mode, pth, sub) => { - pat_ident(binding_mode, - fld.fold_path(pth), - option::map(&sub, |x| fld.fold_pat(*x))) - } - pat_lit(e) => pat_lit(fld.fold_expr(e)), - pat_enum(pth, pats) => { - pat_enum(fld.fold_path(pth), option::map(&pats, - |pats| vec::map(*pats, |x| fld.fold_pat(*x)))) - } - pat_rec(fields, etc) => { + pats: a.pats.map(|x| fld.fold_pat(*x)), + guard: a.guard.map(|x| fld.fold_expr(*x)), + body: fld.fold_block(&a.body), + } +} + +pub fn noop_fold_pat(p: &pat_, fld: ast_fold) -> pat_ { + match *p { + pat_wild => 
pat_wild, + pat_ident(binding_mode, pth, sub) => { + pat_ident( + binding_mode, + fld.fold_path(pth), + sub.map(|x| fld.fold_pat(*x)) + ) + } + pat_lit(e) => pat_lit(fld.fold_expr(e)), + pat_enum(pth, pats) => { + pat_enum( + fld.fold_path(pth), + pats.map(|pats| pats.map(|x| fld.fold_pat(*x))) + ) + } + pat_rec(fields, etc) => { let fs = do fields.map |f| { ast::field_pat { ident: /* FIXME (#2543) */ copy f.ident, @@ -360,8 +394,8 @@ pub fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ { } }; pat_rec(fs, etc) - } - pat_struct(pth, fields, etc) => { + } + pat_struct(pth, fields, etc) => { let pth_ = fld.fold_path(pth); let fs = do fields.map |f| { ast::field_pat { @@ -370,40 +404,44 @@ pub fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ { } }; pat_struct(pth_, fs, etc) - } - pat_tup(elts) => pat_tup(vec::map(elts, |x| fld.fold_pat(*x))), - pat_box(inner) => pat_box(fld.fold_pat(inner)), - pat_uniq(inner) => pat_uniq(fld.fold_pat(inner)), - pat_region(inner) => pat_region(fld.fold_pat(inner)), - pat_range(e1, e2) => { + } + pat_tup(ref elts) => pat_tup(elts.map(|x| fld.fold_pat(*x))), + pat_box(inner) => pat_box(fld.fold_pat(inner)), + pat_uniq(inner) => pat_uniq(fld.fold_pat(inner)), + pat_region(inner) => pat_region(fld.fold_pat(inner)), + pat_range(e1, e2) => { pat_range(fld.fold_expr(e1), fld.fold_expr(e2)) - }, - pat_vec(elts, tail) => pat_vec( - vec::map(elts, |x| fld.fold_pat(*x)), - option::map(&tail, |tail| fld.fold_pat(*tail)) - ) - }; + }, + pat_vec(elts, tail) => { + pat_vec( + elts.map(|x| fld.fold_pat(*x)), + tail.map(|tail| fld.fold_pat(*tail)) + ) + } + } } -fn noop_fold_decl(d: decl_, fld: ast_fold) -> decl_ { - match d { - decl_local(ls) => decl_local(vec::map(ls, |x| fld.fold_local(*x))), - decl_item(it) => match fld.fold_item(it) { - Some(it_folded) => decl_item(it_folded), - None => decl_local(~[]) - } +fn noop_fold_decl(d: &decl_, fld: ast_fold) -> decl_ { + match *d { + decl_local(ls) => decl_local(ls.map(|x| fld.fold_local(*x))), + decl_item(it) => { + match fld.fold_item(it) { + Some(it_folded) => decl_item(it_folded), + None => decl_local(~[]), + } + } } } -pub fn wrap(f: fn@(T, ast_fold) -> T) - -> fn@(T, span, ast_fold) -> (T, span) +pub fn wrap(f: fn@(&T, ast_fold) -> T) + -> fn@(&T, span, ast_fold) -> (T, span) { - return fn@(x: T, s: span, fld: ast_fold) -> (T, span) { + fn@(x: &T, s: span, fld: ast_fold) -> (T, span) { (f(x, fld), s) } } -pub fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { +pub fn noop_fold_expr(e: &expr_, fld: ast_fold) -> expr_ { fn fold_field_(field: field, fld: ast_fold) -> field { spanned { node: ast::field_ { @@ -418,157 +456,196 @@ pub fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ { let fold_mac = |x| fold_mac_(x, fld); - return match e { - expr_vstore(e, v) => { + match *e { + expr_vstore(e, v) => { expr_vstore(fld.fold_expr(e), v) - } - expr_vec(exprs, mutt) => { - expr_vec(fld.map_exprs(|x| fld.fold_expr(x), exprs), mutt) - } - expr_repeat(expr, count, mutt) => - expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt), - expr_rec(ref fields, maybe_expr) => { - expr_rec(vec::map((*fields), |x| fold_field(*x)), - option::map(&maybe_expr, |x| fld.fold_expr(*x))) - } - expr_tup(elts) => expr_tup(vec::map(elts, |x| fld.fold_expr(*x))), - expr_call(f, args, blk) => { - expr_call(fld.fold_expr(f), - fld.map_exprs(|x| fld.fold_expr(x), args), - blk) - } - expr_method_call(f, i, tps, args, blk) => { - expr_method_call(fld.fold_expr(f), - fld.fold_ident(i), - vec::map(tps, |x| fld.fold_ty(*x)), - fld.map_exprs(|x| 
fld.fold_expr(x), args), - blk) - } - expr_binary(binop, lhs, rhs) => { + } + expr_vec(ref exprs, mutt) => { + expr_vec(fld.map_exprs(|x| fld.fold_expr(x), *exprs), mutt) + } + expr_repeat(expr, count, mutt) => { + expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt) + } + expr_rec(ref fields, maybe_expr) => { + expr_rec( + fields.map(|x| fold_field(*x)), + maybe_expr.map(|x| fld.fold_expr(*x)) + ) + } + expr_tup(ref elts) => expr_tup(elts.map(|x| fld.fold_expr(*x))), + expr_call(f, ref args, blk) => { + expr_call( + fld.fold_expr(f), + fld.map_exprs(|x| fld.fold_expr(x), *args), + blk + ) + } + expr_method_call(f, i, ref tps, ref args, blk) => { + expr_method_call( + fld.fold_expr(f), + fld.fold_ident(i), + tps.map(|x| fld.fold_ty(*x)), + fld.map_exprs(|x| fld.fold_expr(x), *args), + blk + ) + } + expr_binary(binop, lhs, rhs) => { expr_binary(binop, fld.fold_expr(lhs), fld.fold_expr(rhs)) - } - expr_unary(binop, ohs) => expr_unary(binop, fld.fold_expr(ohs)), - expr_loop_body(f) => expr_loop_body(fld.fold_expr(f)), - expr_do_body(f) => expr_do_body(fld.fold_expr(f)), - expr_lit(_) => copy e, - expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty), - expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)), - expr_if(cond, ref tr, fl) => { - expr_if(fld.fold_expr(cond), fld.fold_block((*tr)), - option::map(&fl, |x| fld.fold_expr(*x))) - } - expr_while(cond, ref body) => { - expr_while(fld.fold_expr(cond), fld.fold_block((*body))) - } - expr_loop(ref body, opt_ident) => { - expr_loop(fld.fold_block((*body)), - option::map(&opt_ident, |x| fld.fold_ident(*x))) - } - expr_match(expr, ref arms) => { - expr_match(fld.fold_expr(expr), - vec::map((*arms), |x| fld.fold_arm(*x))) - } - expr_fn(proto, ref decl, ref body, _) => { - expr_fn(proto, - fold_fn_decl(/* FIXME (#2543) */ copy *decl, fld), - fld.fold_block(*body), - @()) - } - expr_fn_block(ref decl, ref body) => { - expr_fn_block(fold_fn_decl(/* FIXME (#2543) */ copy *decl, fld), - fld.fold_block(*body)) - } - expr_block(ref blk) => expr_block(fld.fold_block((*blk))), - expr_copy(e) => expr_copy(fld.fold_expr(e)), - expr_assign(el, er) => { + } + expr_unary(binop, ohs) => expr_unary(binop, fld.fold_expr(ohs)), + expr_loop_body(f) => expr_loop_body(fld.fold_expr(f)), + expr_do_body(f) => expr_do_body(fld.fold_expr(f)), + expr_lit(_) => copy *e, + expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty), + expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)), + expr_if(cond, ref tr, fl) => { + expr_if( + fld.fold_expr(cond), + fld.fold_block(tr), + fl.map(|x| fld.fold_expr(*x)) + ) + } + expr_while(cond, ref body) => { + expr_while(fld.fold_expr(cond), fld.fold_block(body)) + } + expr_loop(ref body, opt_ident) => { + expr_loop( + fld.fold_block(body), + opt_ident.map(|x| fld.fold_ident(*x)) + ) + } + expr_match(expr, ref arms) => { + expr_match( + fld.fold_expr(expr), + arms.map(|x| fld.fold_arm(x)) + ) + } + expr_fn(proto, ref decl, ref body, _) => { + expr_fn( + proto, + fold_fn_decl(decl, fld), + fld.fold_block(body), + @() + ) + } + expr_fn_block(ref decl, ref body) => { + expr_fn_block( + fold_fn_decl(decl, fld), + fld.fold_block(body) + ) + } + expr_block(ref blk) => expr_block(fld.fold_block(blk)), + expr_copy(e) => expr_copy(fld.fold_expr(e)), + expr_assign(el, er) => { expr_assign(fld.fold_expr(el), fld.fold_expr(er)) - } - expr_swap(el, er) => { + } + expr_swap(el, er) => { expr_swap(fld.fold_expr(el), fld.fold_expr(er)) - } - expr_assign_op(op, el, er) => { + } + expr_assign_op(op, el, er) => { 
expr_assign_op(op, fld.fold_expr(el), fld.fold_expr(er)) - } - expr_field(el, id, tys) => { - expr_field(fld.fold_expr(el), fld.fold_ident(id), - vec::map(tys, |x| fld.fold_ty(*x))) - } - expr_index(el, er) => { + } + expr_field(el, id, ref tys) => { + expr_field( + fld.fold_expr(el), fld.fold_ident(id), + tys.map(|x| fld.fold_ty(*x)) + ) + } + expr_index(el, er) => { expr_index(fld.fold_expr(el), fld.fold_expr(er)) - } - expr_path(pth) => expr_path(fld.fold_path(pth)), - expr_break(opt_ident) => - expr_break(option::map(&opt_ident, |x| fld.fold_ident(*x))), - expr_again(opt_ident) => - expr_again(option::map(&opt_ident, |x| fld.fold_ident(*x))), - expr_ret(e) => expr_ret(option::map(&e, |x| fld.fold_expr(*x))), - expr_log(i, lv, e) => expr_log(i, fld.fold_expr(lv), - fld.fold_expr(e)), - expr_assert(e) => expr_assert(fld.fold_expr(e)), - expr_mac(ref mac) => expr_mac(fold_mac((*mac))), - expr_struct(path, ref fields, maybe_expr) => { - expr_struct(fld.fold_path(path), - vec::map((*fields), |x| fold_field(*x)), - option::map(&maybe_expr, |x| fld.fold_expr(*x))) - }, - expr_paren(ex) => expr_paren(fld.fold_expr(ex)) - } -} - -pub fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ { + } + expr_path(pth) => expr_path(fld.fold_path(pth)), + expr_break(ref opt_ident) => { + expr_break(opt_ident.map(|x| fld.fold_ident(*x))) + } + expr_again(ref opt_ident) => { + expr_again(opt_ident.map(|x| fld.fold_ident(*x))) + } + expr_ret(ref e) => { + expr_ret(e.map(|x| fld.fold_expr(*x))) + } + expr_log(i, lv, e) => { + expr_log( + i, + fld.fold_expr(lv), + fld.fold_expr(e) + ) + } + expr_assert(e) => expr_assert(fld.fold_expr(e)), + expr_mac(ref mac) => expr_mac(fold_mac((*mac))), + expr_struct(path, ref fields, maybe_expr) => { + expr_struct( + fld.fold_path(path), + fields.map(|x| fold_field(*x)), + maybe_expr.map(|x| fld.fold_expr(*x)) + ) + }, + expr_paren(ex) => expr_paren(fld.fold_expr(ex)) + } +} + +pub fn noop_fold_ty(t: &ty_, fld: ast_fold) -> ty_ { let fold_mac = |x| fold_mac_(x, fld); - fn fold_mt(mt: mt, fld: ast_fold) -> mt { - mt { ty: fld.fold_ty(mt.ty), mutbl: mt.mutbl } + fn fold_mt(mt: &mt, fld: ast_fold) -> mt { + mt { + ty: fld.fold_ty(mt.ty), + mutbl: mt.mutbl, + } } fn fold_field(f: ty_field, fld: ast_fold) -> ty_field { spanned { node: ast::ty_field_ { ident: fld.fold_ident(f.node.ident), - mt: fold_mt(f.node.mt, fld), + mt: fold_mt(&f.node.mt, fld), }, span: fld.new_span(f.span), } } - match t { - ty_nil | ty_bot | ty_infer => copy t, - ty_box(mt) => ty_box(fold_mt(mt, fld)), - ty_uniq(mt) => ty_uniq(fold_mt(mt, fld)), - ty_vec(mt) => ty_vec(fold_mt(mt, fld)), - ty_ptr(mt) => ty_ptr(fold_mt(mt, fld)), - ty_rptr(region, mt) => ty_rptr(region, fold_mt(mt, fld)), - ty_rec(ref fields) => - ty_rec(vec::map((*fields), |f| fold_field(*f, fld))), - ty_closure(f) => - ty_closure(@TyClosure { - sigil: f.sigil, - purity: f.purity, - region: f.region, - onceness: f.onceness, - decl: fold_fn_decl(f.decl, fld) - }), - ty_bare_fn(f) => - ty_bare_fn(@TyBareFn { - purity: f.purity, - abi: f.abi, - decl: fold_fn_decl(f.decl, fld) - }), - ty_tup(tys) => ty_tup(vec::map(tys, |ty| fld.fold_ty(*ty))), - ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)), - ty_fixed_length_vec(mt, vs) => - ty_fixed_length_vec(fold_mt(mt, fld), vs), - ty_mac(ref mac) => ty_mac(fold_mac((*mac))) + match *t { + ty_nil | ty_bot | ty_infer => copy *t, + ty_box(ref mt) => ty_box(fold_mt(mt, fld)), + ty_uniq(ref mt) => ty_uniq(fold_mt(mt, fld)), + ty_vec(ref mt) => ty_vec(fold_mt(mt, fld)), + ty_ptr(ref mt) => 
ty_ptr(fold_mt(mt, fld)), + ty_rptr(region, ref mt) => ty_rptr(region, fold_mt(mt, fld)), + ty_rec(ref fields) => ty_rec(fields.map(|f| fold_field(*f, fld))), + ty_closure(ref f) => { + ty_closure(@TyClosure { + sigil: f.sigil, + purity: f.purity, + region: f.region, + onceness: f.onceness, + decl: fold_fn_decl(&f.decl, fld) + }) + } + ty_bare_fn(ref f) => { + ty_bare_fn(@TyBareFn { + purity: f.purity, + abi: f.abi, + decl: fold_fn_decl(&f.decl, fld) + }) + } + ty_tup(ref tys) => ty_tup(tys.map(|ty| fld.fold_ty(*ty))), + ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)), + ty_fixed_length_vec(ref mt, vs) => { + ty_fixed_length_vec( + fold_mt(mt, fld), + vs + ) + } + ty_mac(ref mac) => ty_mac(fold_mac(*mac)) } } // ...nor do modules -pub fn noop_fold_mod(m: _mod, fld: ast_fold) -> _mod { +pub fn noop_fold_mod(m: &_mod, fld: ast_fold) -> _mod { ast::_mod { view_items: vec::map(m.view_items, |x| fld.fold_view_item(*x)), items: vec::filter_mapped(m.items, |x| fld.fold_item(*x)), } } -fn noop_fold_foreign_mod(nm: foreign_mod, fld: ast_fold) -> foreign_mod { +fn noop_fold_foreign_mod(nm: &foreign_mod, fld: ast_fold) -> foreign_mod { ast::foreign_mod { sort: nm.sort, abi: nm.abi, @@ -577,7 +654,7 @@ fn noop_fold_foreign_mod(nm: foreign_mod, fld: ast_fold) -> foreign_mod { } } -fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ { +fn noop_fold_variant(v: &variant_, fld: ast_fold) -> variant_ { fn fold_variant_arg_(va: variant_arg, fld: ast_fold) -> variant_arg { ast::variant_arg { ty: fld.fold_ty(va.ty), id: fld.new_id(va.id) } } @@ -585,17 +662,23 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ { let kind; match v.kind { - tuple_variant_kind(variant_args) => - kind = tuple_variant_kind(vec::map(variant_args, - |x| fold_variant_arg(*x))), + tuple_variant_kind(ref variant_args) => { + kind = tuple_variant_kind(do variant_args.map |x| { + fold_variant_arg(*x) + }) + } struct_variant_kind(struct_def) => { let dtor = do option::map(&struct_def.dtor) |dtor| { - let dtor_body = fld.fold_block(dtor.node.body); + let dtor_body = fld.fold_block(&dtor.node.body); let dtor_id = fld.new_id(dtor.node.id); - spanned { node: ast::struct_dtor_ { body: dtor_body, - id: dtor_id, - .. dtor.node}, - .. *dtor } + spanned { + node: ast::struct_dtor_ { + body: dtor_body, + id: dtor_id, + .. copy dtor.node + }, + .. 
copy *dtor + } }; kind = struct_variant_kind(@ast::struct_def { fields: vec::map(struct_def.fields, @@ -604,12 +687,13 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ { ctor_id: option::map(&struct_def.ctor_id, |c| fld.new_id(*c)) }) } - enum_variant_kind(ref enum_definition) => { - let variants = vec::map((*enum_definition).variants, - |x| fld.fold_variant(*x)); - let common = option::map(&(*enum_definition).common, - |x| fold_struct_def(*x, fld)); + let variants = do (*enum_definition).variants.map |x| { + fld.fold_variant(x) + }; + let common = do (*enum_definition).common.map |x| { + fold_struct_def(*x, fld) + }; kind = enum_variant_kind( ast::enum_def(ast::enum_def_ { variants: variants, @@ -620,7 +704,7 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ { } let fold_attribute = |x| fold_attribute_(x, fld); - let attrs = vec::map(v.attrs, |x| fold_attribute(*x)); + let attrs = v.attrs.map(|x| fold_attribute(*x)); let de = match v.disr_expr { Some(e) => Some(fld.fold_expr(e)), @@ -636,11 +720,11 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ { } } -fn noop_fold_ident(&&i: ident, _fld: ast_fold) -> ident { - return /* FIXME (#2543) */ copy i; +fn noop_fold_ident(i: ident, _fld: ast_fold) -> ident { + /* FIXME (#2543) */ copy i } -fn noop_fold_path(&&p: path, fld: ast_fold) -> path { +fn noop_fold_path(p: @path, fld: ast_fold) -> path { ast::path { span: fld.new_span(p.span), global: p.global, idents: p.idents.map(|x| fld.fold_ident(*x)), @@ -648,7 +732,7 @@ fn noop_fold_path(&&p: path, fld: ast_fold) -> path { types: p.types.map(|x| fld.fold_ty(*x)) } } -fn noop_fold_local(l: local_, fld: ast_fold) -> local_ { +fn noop_fold_local(l: &local_, fld: ast_fold) -> local_ { local_ { is_mutbl: l.is_mutbl, ty: fld.fold_ty(l.ty), @@ -660,8 +744,8 @@ fn noop_fold_local(l: local_, fld: ast_fold) -> local_ { /* temporarily eta-expand because of a compiler bug with using `fn` as a value */ -fn noop_map_exprs(f: fn@(&&v: @expr) -> @expr, es: ~[@expr]) -> ~[@expr] { - return vec::map(es, |x| f(*x)); +fn noop_map_exprs(f: fn@(@expr) -> @expr, es: &[@expr]) -> ~[@expr] { + es.map(|x| f(*x)) } fn noop_id(i: node_id) -> node_id { return i; } @@ -669,38 +753,40 @@ fn noop_id(i: node_id) -> node_id { return i; } fn noop_span(sp: span) -> span { return sp; } pub fn default_ast_fold() -> ast_fold_fns { - return @AstFoldFns {fold_crate: wrap(noop_fold_crate), - fold_view_item: noop_fold_view_item, - fold_foreign_item: noop_fold_foreign_item, - fold_item: noop_fold_item, - fold_struct_field: noop_fold_struct_field, - fold_item_underscore: noop_fold_item_underscore, - fold_method: noop_fold_method, - fold_block: wrap(noop_fold_block), - fold_stmt: wrap(noop_fold_stmt), - fold_arm: noop_fold_arm, - fold_pat: wrap(noop_fold_pat), - fold_decl: wrap(noop_fold_decl), - fold_expr: wrap(noop_fold_expr), - fold_ty: wrap(noop_fold_ty), - fold_mod: noop_fold_mod, - fold_foreign_mod: noop_fold_foreign_mod, - fold_variant: wrap(noop_fold_variant), - fold_ident: noop_fold_ident, - fold_path: noop_fold_path, - fold_local: wrap(noop_fold_local), - map_exprs: noop_map_exprs, - new_id: noop_id, - new_span: noop_span}; + @AstFoldFns { + fold_crate: wrap(noop_fold_crate), + fold_view_item: noop_fold_view_item, + fold_foreign_item: noop_fold_foreign_item, + fold_item: noop_fold_item, + fold_struct_field: noop_fold_struct_field, + fold_item_underscore: noop_fold_item_underscore, + fold_method: noop_fold_method, + fold_block: wrap(noop_fold_block), + fold_stmt: wrap(noop_fold_stmt), + 
fold_arm: noop_fold_arm, + fold_pat: wrap(noop_fold_pat), + fold_decl: wrap(noop_fold_decl), + fold_expr: wrap(noop_fold_expr), + fold_ty: wrap(noop_fold_ty), + fold_mod: noop_fold_mod, + fold_foreign_mod: noop_fold_foreign_mod, + fold_variant: wrap(noop_fold_variant), + fold_ident: noop_fold_ident, + fold_path: noop_fold_path, + fold_local: wrap(noop_fold_local), + map_exprs: noop_map_exprs, + new_id: noop_id, + new_span: noop_span + } } pub impl ast_fold for ast_fold_fns { /* naturally, a macro to write these would be nice */ - fn fold_crate(c: crate) -> crate { - let (n, s) = (self.fold_crate)(c.node, c.span, self as ast_fold); + fn fold_crate(c: &crate) -> crate { + let (n, s) = (self.fold_crate)(&c.node, c.span, self as ast_fold); spanned { node: n, span: (self.new_span)(s) } } - fn fold_view_item(&&x: @view_item) -> + fn fold_view_item(x: @view_item) -> @view_item { @ast::view_item { node: (self.fold_view_item)(x.node, self as ast_fold), @@ -710,14 +796,13 @@ pub impl ast_fold for ast_fold_fns { span: (self.new_span)(x.span), } } - fn fold_foreign_item(&&x: @foreign_item) - -> @foreign_item { - return (self.fold_foreign_item)(x, self as ast_fold); + fn fold_foreign_item(x: @foreign_item) -> @foreign_item { + (self.fold_foreign_item)(x, self as ast_fold) } - fn fold_item(&&i: @item) -> Option<@item> { - return (self.fold_item)(i, self as ast_fold); + fn fold_item(i: @item) -> Option<@item> { + (self.fold_item)(i, self as ast_fold) } - fn fold_struct_field(&&sf: @struct_field) -> @struct_field { + fn fold_struct_field(sf: @struct_field) -> @struct_field { @spanned { node: ast::struct_field_ { kind: copy sf.node.kind, @@ -727,39 +812,37 @@ pub impl ast_fold for ast_fold_fns { span: (self.new_span)(sf.span), } } - fn fold_item_underscore(i: item_) -> - item_ { - return (self.fold_item_underscore)(i, self as ast_fold); + fn fold_item_underscore(i: &item_) -> item_ { + (self.fold_item_underscore)(i, self as ast_fold) } - fn fold_method(&&x: @method) - -> @method { - return (self.fold_method)(x, self as ast_fold); + fn fold_method(x: @method) -> @method { + (self.fold_method)(x, self as ast_fold) } - fn fold_block(x: blk) -> blk { - let (n, s) = (self.fold_block)(x.node, x.span, self as ast_fold); + fn fold_block(x: &blk) -> blk { + let (n, s) = (self.fold_block)(&x.node, x.span, self as ast_fold); spanned { node: n, span: (self.new_span)(s) } } - fn fold_stmt(&&x: @stmt) -> @stmt { - let (n, s) = (self.fold_stmt)(x.node, x.span, self as ast_fold); + fn fold_stmt(x: &stmt) -> @stmt { + let (n, s) = (self.fold_stmt)(&x.node, x.span, self as ast_fold); @spanned { node: n, span: (self.new_span)(s) } } - fn fold_arm(x: arm) -> arm { - return (self.fold_arm)(x, self as ast_fold); + fn fold_arm(x: &arm) -> arm { + (self.fold_arm)(x, self as ast_fold) } - fn fold_pat(&&x: @pat) -> @pat { - let (n, s) = (self.fold_pat)(x.node, x.span, self as ast_fold); + fn fold_pat(x: @pat) -> @pat { + let (n, s) = (self.fold_pat)(&x.node, x.span, self as ast_fold); @pat { id: (self.new_id)(x.id), node: n, span: (self.new_span)(s), } } - fn fold_decl(&&x: @decl) -> @decl { - let (n, s) = (self.fold_decl)(x.node, x.span, self as ast_fold); + fn fold_decl(x: @decl) -> @decl { + let (n, s) = (self.fold_decl)(&x.node, x.span, self as ast_fold); @spanned { node: n, span: (self.new_span)(s) } } - fn fold_expr(&&x: @expr) -> @expr { - let (n, s) = (self.fold_expr)(x.node, x.span, self as ast_fold); + fn fold_expr(x: @expr) -> @expr { + let (n, s) = (self.fold_expr)(&x.node, x.span, self as ast_fold); @expr { id: 
(self.new_id)(x.id), callee_id: (self.new_id)(x.callee_id), @@ -767,37 +850,35 @@ pub impl ast_fold for ast_fold_fns { span: (self.new_span)(s), } } - fn fold_ty(&&x: @Ty) -> @Ty { - let (n, s) = (self.fold_ty)(x.node, x.span, self as ast_fold); + fn fold_ty(x: @Ty) -> @Ty { + let (n, s) = (self.fold_ty)(&x.node, x.span, self as ast_fold); @Ty { id: (self.new_id)(x.id), node: n, span: (self.new_span)(s), } } - fn fold_mod(x: _mod) -> _mod { - return (self.fold_mod)(x, self as ast_fold); + fn fold_mod(x: &_mod) -> _mod { + (self.fold_mod)(x, self as ast_fold) } - fn fold_foreign_mod(x: foreign_mod) -> - foreign_mod { - return (self.fold_foreign_mod)(x, self as ast_fold); + fn fold_foreign_mod(x: &foreign_mod) -> foreign_mod { + (self.fold_foreign_mod)(x, self as ast_fold) } - fn fold_variant(x: variant) -> - variant { - let (n, s) = (self.fold_variant)(x.node, x.span, self as ast_fold); + fn fold_variant(x: &variant) -> variant { + let (n, s) = (self.fold_variant)(&x.node, x.span, self as ast_fold); spanned { node: n, span: (self.new_span)(s) } } - fn fold_ident(&&x: ident) -> ident { - return (self.fold_ident)(x, self as ast_fold); + fn fold_ident(x: ident) -> ident { + (self.fold_ident)(x, self as ast_fold) } - fn fold_path(&&x: @path) -> @path { - @(self.fold_path)(*x, self as ast_fold) + fn fold_path(x: @path) -> @path { + @(self.fold_path)(x, self as ast_fold) } - fn fold_local(&&x: @local) -> @local { - let (n, s) = (self.fold_local)(x.node, x.span, self as ast_fold); + fn fold_local(x: @local) -> @local { + let (n, s) = (self.fold_local)(&x.node, x.span, self as ast_fold); @spanned { node: n, span: (self.new_span)(s) } } - fn map_exprs(f: fn@(&&v: @expr) -> @expr, e: ~[@expr]) -> ~[@expr] { + fn map_exprs(f: fn@(@expr) -> @expr, e: &[@expr]) -> ~[@expr] { (self.map_exprs)(f, e) } fn new_id(node_id: ast::node_id) -> node_id { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 6230b0465dcc2..b4773fe2f97b9 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -38,8 +38,8 @@ use core::u64; use core::vec; // The @ps is stored here to prevent recursive type. 
-pub enum ann_node { - node_block(@ps, ast::blk), +pub enum ann_node/& { + node_block(@ps, &ast::blk), node_item(@ps, @ast::item), node_expr(@ps, @ast::expr), node_pat(@ps, @ast::pat), @@ -183,7 +183,7 @@ pub fn fun_to_str(decl: ast::fn_decl, name: ast::ident, } } -pub fn block_to_str(blk: ast::blk, intr: @ident_interner) -> ~str { +pub fn block_to_str(blk: &ast::blk, intr: @ident_interner) -> ~str { do io::with_str_writer |wr| { let s = rust_printer(wr, intr); // containing cbox, will be closed by print-block at } @@ -510,7 +510,7 @@ pub fn print_item(s: @ps, &&item: @ast::item) { item.vis ); word(s.s, ~" "); - print_block_with_attrs(s, (*body), item.attrs); + print_block_with_attrs(s, body, item.attrs); } ast::item_mod(_mod) => { head(s, visibility_qualified(item.vis, ~"mod")); @@ -607,8 +607,8 @@ pub fn print_item(s: @ps, &&item: @ast::item) { } word(s.s, ~" "); bopen(s); - for (*methods).each |meth| { - print_trait_method(s, *meth); + for methods.each |meth| { + print_trait_method(s, meth); } bclose(s, item.span); } @@ -741,7 +741,7 @@ pub fn print_struct(s: @ps, maybe_print_comment(s, dtor.span.lo); print_outer_attributes(s, dtor.node.attrs); head(s, ~"drop"); - print_block(s, dtor.node.body); + print_block(s, &dtor.node.body); } for struct_def.fields.each |field| { @@ -839,7 +839,7 @@ pub fn print_variant(s: @ps, v: ast::variant) { } } -pub fn print_ty_method(s: @ps, m: ast::ty_method) { +pub fn print_ty_method(s: @ps, m: &ast::ty_method) { hardbreak_if_not_bol(s); maybe_print_comment(s, m.span.lo); print_outer_attributes(s, m.attrs); @@ -849,10 +849,10 @@ pub fn print_ty_method(s: @ps, m: ast::ty_method) { word(s.s, ~";"); } -pub fn print_trait_method(s: @ps, m: ast::trait_method) { - match m { - required(ref ty_m) => print_ty_method(s, (*ty_m)), - provided(m) => print_method(s, m) +pub fn print_trait_method(s: @ps, m: &ast::trait_method) { + match *m { + required(ref ty_m) => print_ty_method(s, ty_m), + provided(m) => print_method(s, m) } } @@ -864,7 +864,7 @@ pub fn print_method(s: @ps, meth: @ast::method) { meth.ident, meth.tps, Some(meth.self_ty.node), meth.vis); word(s.s, ~" "); - print_block_with_attrs(s, meth.body, meth.attrs); + print_block_with_attrs(s, &meth.body, meth.attrs); } pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) { @@ -935,22 +935,22 @@ pub fn print_stmt(s: @ps, st: ast::stmt) { maybe_print_trailing_comment(s, st.span, None); } -pub fn print_block(s: @ps, blk: ast::blk) { +pub fn print_block(s: @ps, blk: &ast::blk) { print_possibly_embedded_block(s, blk, block_normal, indent_unit); } -pub fn print_block_unclosed(s: @ps, blk: ast::blk) { +pub fn print_block_unclosed(s: @ps, blk: &ast::blk) { print_possibly_embedded_block_(s, blk, block_normal, indent_unit, ~[], false); } -pub fn print_block_unclosed_indent(s: @ps, blk: ast::blk, indented: uint) { +pub fn print_block_unclosed_indent(s: @ps, blk: &ast::blk, indented: uint) { print_possibly_embedded_block_(s, blk, block_normal, indented, ~[], false); } pub fn print_block_with_attrs(s: @ps, - blk: ast::blk, + blk: &ast::blk, attrs: ~[ast::attribute]) { print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs, true); @@ -959,7 +959,7 @@ pub fn print_block_with_attrs(s: @ps, pub enum embed_type { block_block_fn, block_normal, } pub fn print_possibly_embedded_block(s: @ps, - blk: ast::blk, + blk: &ast::blk, embedded: embed_type, indented: uint) { print_possibly_embedded_block_( @@ -967,7 +967,7 @@ pub fn print_possibly_embedded_block(s: @ps, } pub fn print_possibly_embedded_block_(s: 
@ps, - blk: ast::blk, + blk: &ast::blk, embedded: embed_type, indented: uint, attrs: ~[ast::attribute], @@ -1002,7 +1002,7 @@ pub fn print_possibly_embedded_block_(s: @ps, (s.ann.post)(ann_node); } -pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk, +pub fn print_if(s: @ps, test: @ast::expr, blk: &ast::blk, elseopt: Option<@ast::expr>, chk: bool) { head(s, ~"if"); if chk { word_nbsp(s, ~"check"); } @@ -1020,7 +1020,7 @@ pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk, word(s.s, ~" else if "); print_expr(s, i); space(s.s); - print_block(s, (*t)); + print_block(s, t); do_else(s, e); } // "final else" @@ -1028,7 +1028,7 @@ pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk, cbox(s, indent_unit - 1u); ibox(s, 0u); word(s.s, ~" else "); - print_block(s, (*b)); + print_block(s, b); } // BLEAH, constraints would be great here _ => { @@ -1263,13 +1263,13 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { print_type_ex(s, ty, true); } ast::expr_if(test, ref blk, elseopt) => { - print_if(s, test, (*blk), elseopt, false); + print_if(s, test, blk, elseopt, false); } ast::expr_while(test, ref blk) => { head(s, ~"while"); print_expr(s, test); space(s.s); - print_block(s, (*blk)); + print_block(s, blk); } ast::expr_loop(ref blk, opt_ident) => { head(s, ~"loop"); @@ -1278,7 +1278,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { print_ident(s, *ident); word_space(s, ~":"); }); - print_block(s, (*blk)); + print_block(s, blk); } ast::expr_match(expr, ref arms) => { cbox(s, match_indent_unit); @@ -1323,7 +1323,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { ast::expr_block(ref blk) => { // the block will close the pattern's ibox print_block_unclosed_indent( - s, (*blk), match_indent_unit); + s, blk, match_indent_unit); } _ => { end(s); // close the ibox for the pattern @@ -1340,7 +1340,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { } } else { // the block will close the pattern's ibox - print_block_unclosed_indent(s, arm.body, match_indent_unit); + print_block_unclosed_indent(s, &arm.body, match_indent_unit); } } bclose_(s, expr.span, match_indent_unit); @@ -1354,7 +1354,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { Some(sigil), ast::inherited); print_fn_args_and_ret(s, /* FIXME (#2543) */ copy *decl, None); space(s.s); - print_block(s, (*body)); + print_block(s, body); } ast::expr_fn_block(ref decl, ref body) => { // in do/for blocks we don't want to show an empty @@ -1365,16 +1365,16 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { print_fn_block_args(s, /* FIXME (#2543) */ copy *decl); space(s.s); // } - assert (*body).node.stmts.is_empty(); - assert (*body).node.expr.is_some(); + assert body.node.stmts.is_empty(); + assert body.node.expr.is_some(); // we extract the block, so as not to create another set of boxes - match (*body).node.expr.get().node { + match body.node.expr.get().node { ast::expr_block(ref blk) => { - print_block_unclosed(s, (*blk)); + print_block_unclosed(s, blk); } _ => { // this is a bare expression - print_expr(s, (*body).node.expr.get()); + print_expr(s, body.node.expr.get()); end(s); // need to close a box } } @@ -1394,7 +1394,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { cbox(s, indent_unit); // head-box, will be closed by print-block after { ibox(s, 0u); - print_block(s, (*blk)); + print_block(s, blk); } ast::expr_copy(e) => { word_space(s, ~"copy"); print_expr(s, e); } ast::expr_assign(lhs, rhs) => { diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index d79a2bbf17469..1b0842dbd86ae 100644 --- a/src/libsyntax/visit.rs 
+++ b/src/libsyntax/visit.rs @@ -67,18 +67,18 @@ pub struct Visitor { visit_foreign_item: fn@(@foreign_item, E, vt), visit_item: fn@(@item, E, vt), visit_local: fn@(@local, E, vt), - visit_block: fn@(ast::blk, E, vt), + visit_block: fn@(&blk, E, vt), visit_stmt: fn@(@stmt, E, vt), - visit_arm: fn@(arm, E, vt), + visit_arm: fn@(&arm, E, vt), visit_pat: fn@(@pat, E, vt), visit_decl: fn@(@decl, E, vt), visit_expr: fn@(@expr, E, vt), visit_expr_post: fn@(@expr, E, vt), visit_ty: fn@(@Ty, E, vt), visit_ty_params: fn@(&[ty_param], E, vt), - visit_fn: fn@(&fn_kind, fn_decl, blk, span, node_id, E, vt), - visit_ty_method: fn@(ty_method, E, vt), - visit_trait_method: fn@(trait_method, E, vt), + visit_fn: fn@(&fn_kind, &fn_decl, &blk, span, node_id, E, vt), + visit_ty_method: fn@(&ty_method, E, vt), + visit_trait_method: fn@(&trait_method, E, vt), visit_struct_def: fn@(@struct_def, ident, &[ty_param], node_id, E, vt), visit_struct_field: fn@(@struct_field, E, vt), @@ -146,8 +146,8 @@ pub fn visit_item(i: @item, e: E, v: vt) { *tps, purity ), - /* FIXME (#2543) */ copy *decl, - *body, + decl, + body, i.span, i.id, e, @@ -190,7 +190,7 @@ pub fn visit_item(i: @item, e: E, v: vt) { (v.visit_ty_params)(*tps, e, v); for traits.each |p| { visit_path(p.path, e, v); } for (*methods).each |m| { - (v.visit_trait_method)(*m, e, v); + (v.visit_trait_method)(m, e, v); } } item_mac(ref m) => visit_mac((*m), e, v) @@ -198,12 +198,12 @@ pub fn visit_item(i: @item, e: E, v: vt) { } pub fn visit_enum_def(enum_definition: ast::enum_def, - tps: ~[ast::ty_param], + tps: &[ast::ty_param], e: E, v: vt) { for enum_definition.variants.each |vr| { match vr.node.kind { - tuple_variant_kind(variant_args) => { + tuple_variant_kind(ref variant_args) => { for variant_args.each |va| { (v.visit_ty)(va.ty, e, v); } } struct_variant_kind(struct_def) => { @@ -223,30 +223,31 @@ pub fn skip_ty(_t: @Ty, _e: E, _v: vt) {} pub fn visit_ty(t: @Ty, e: E, v: vt) { match t.node { - ty_box(mt) | ty_uniq(mt) | - ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => { - (v.visit_ty)(mt.ty, e, v); - } - ty_rec(ref flds) => for (*flds).each |f| { - (v.visit_ty)(f.node.mt.ty, e, v); - }, - ty_tup(ts) => for ts.each |tt| { - (v.visit_ty)(*tt, e, v); - }, - ty_closure(f) => { - for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } - (v.visit_ty)(f.decl.output, e, v); - } - ty_bare_fn(f) => { - for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } - (v.visit_ty)(f.decl.output, e, v); - } - ty_path(p, _) => visit_path(p, e, v), - ty_fixed_length_vec(mt, _) => (v.visit_ty)(mt.ty, e, v), - ty_nil | - ty_bot | - ty_mac(_) | - ty_infer => () + ty_box(mt) | ty_uniq(mt) | + ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => { + (v.visit_ty)(mt.ty, e, v); + }, + ty_rec(ref flds) => { + for flds.each |f| { + (v.visit_ty)(f.node.mt.ty, e, v); + } + }, + ty_tup(ref ts) => { + for ts.each |tt| { + (v.visit_ty)(*tt, e, v); + } + }, + ty_closure(ref f) => { + for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } + (v.visit_ty)(f.decl.output, e, v); + }, + ty_bare_fn(ref f) => { + for f.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } + (v.visit_ty)(f.decl.output, e, v); + }, + ty_path(p, _) => visit_path(p, e, v), + ty_fixed_length_vec(ref mt, _) => (v.visit_ty)(mt.ty, e, v), + ty_nil | ty_bot | ty_mac(_) | ty_infer => () } } @@ -256,49 +257,55 @@ pub fn visit_path(p: @path, e: E, v: vt) { pub fn visit_pat(p: @pat, e: E, v: vt) { match p.node { - pat_enum(path, children) => { - visit_path(path, e, v); - do option::iter(&children) |children| { - for children.each |child| { 
(v.visit_pat)(*child, e, v); }} - } - pat_rec(fields, _) => for fields.each |f| { - (v.visit_pat)(f.pat, e, v) - }, - pat_struct(path, fields, _) => { - visit_path(path, e, v); - for fields.each |f| { - (v.visit_pat)(f.pat, e, v); + pat_enum(path, ref children) => { + visit_path(path, e, v); + do children.iter |children| { + for children.each |child| { (v.visit_pat)(*child, e, v); } + } } - } - pat_tup(elts) => for elts.each |elt| { - (v.visit_pat)(*elt, e, v) - }, - pat_box(inner) | pat_uniq(inner) | pat_region(inner) => - (v.visit_pat)(inner, e, v), - pat_ident(_, path, inner) => { - visit_path(path, e, v); - do option::iter(&inner) |subpat| { (v.visit_pat)(*subpat, e, v)}; - } - pat_lit(ex) => (v.visit_expr)(ex, e, v), - pat_range(e1, e2) => { - (v.visit_expr)(e1, e, v); - (v.visit_expr)(e2, e, v); - } - pat_wild => (), - pat_vec(elts, tail) => { - for elts.each |elt| { - (v.visit_pat)(*elt, e, v); + pat_rec(ref fields, _) => { + for fields.each |f| { + (v.visit_pat)(f.pat, e, v) + } + }, + pat_struct(path, ref fields, _) => { + visit_path(path, e, v); + for fields.each |f| { + (v.visit_pat)(f.pat, e, v); + } + } + pat_tup(ref elts) => { + for elts.each |elt| { + (v.visit_pat)(*elt, e, v) + } + }, + pat_box(inner) | pat_uniq(inner) | pat_region(inner) => { + (v.visit_pat)(inner, e, v) + }, + pat_ident(_, path, ref inner) => { + visit_path(path, e, v); + do inner.iter |subpat| { (v.visit_pat)(*subpat, e, v) } } - do option::iter(&tail) |tail| { - (v.visit_pat)(*tail, e, v); + pat_lit(ex) => (v.visit_expr)(ex, e, v), + pat_range(e1, e2) => { + (v.visit_expr)(e1, e, v); + (v.visit_expr)(e2, e, v); + } + pat_wild => (), + pat_vec(ref elts, ref tail) => { + for elts.each |elt| { + (v.visit_pat)(*elt, e, v); + } + do tail.iter |tail| { + (v.visit_pat)(*tail, e, v); + } } - } } } pub fn visit_foreign_item(ni: @foreign_item, e: E, v: vt) { match ni.node { - foreign_item_fn(fd, _, ref tps) => { + foreign_item_fn(ref fd, _, ref tps) => { (v.visit_ty_params)(*tps, e, v); visit_fn_decl(fd, e, v); } @@ -323,7 +330,7 @@ pub fn visit_ty_params(tps: &[ty_param], e: E, v: vt) { } } -pub fn visit_fn_decl(fd: fn_decl, e: E, v: vt) { +pub fn visit_fn_decl(fd: &fn_decl, e: E, v: vt) { for fd.inputs.each |a| { (v.visit_pat)(a.pat, e, v); (v.visit_ty)(a.ty, e, v); @@ -342,8 +349,8 @@ pub fn visit_method_helper(m: &method, e: E, v: vt) { m.tps, m ), - m.decl, - m.body, + &m.decl, + &m.body, m.span, m.id, e, @@ -360,8 +367,8 @@ pub fn visit_struct_dtor_helper(dtor: struct_dtor, tps: &[ty_param], dtor.node.self_id, parent_id ), - ast_util::dtor_dec(), - dtor.node.body, + &ast_util::dtor_dec(), + &dtor.node.body, dtor.span, dtor.node.id, e, @@ -370,22 +377,22 @@ pub fn visit_struct_dtor_helper(dtor: struct_dtor, tps: &[ty_param], } -pub fn visit_fn(fk: &fn_kind, decl: fn_decl, body: blk, _sp: span, +pub fn visit_fn(fk: &fn_kind, decl: &fn_decl, body: &blk, _sp: span, _id: node_id, e: E, v: vt) { visit_fn_decl(decl, e, v); (v.visit_ty_params)(tps_of_fn(fk), e, v); (v.visit_block)(body, e, v); } -pub fn visit_ty_method(m: ty_method, e: E, v: vt) { +pub fn visit_ty_method(m: &ty_method, e: E, v: vt) { for m.decl.inputs.each |a| { (v.visit_ty)(a.ty, e, v); } (v.visit_ty_params)(m.tps, e, v); (v.visit_ty)(m.decl.output, e, v); } -pub fn visit_trait_method(m: trait_method, e: E, v: vt) { - match m { - required(ref ty_m) => (v.visit_ty_method)((*ty_m), e, v), +pub fn visit_trait_method(m: &trait_method, e: E, v: vt) { + match *m { + required(ref ty_m) => (v.visit_ty_method)(ty_m, e, v), provided(m) => 
visit_method_helper(m, e, v) } } @@ -408,7 +415,7 @@ pub fn visit_struct_method(m: @method, e: E, v: vt) { visit_method_helper(m, e, v); } -pub fn visit_block(b: ast::blk, e: E, v: vt) { +pub fn visit_block(b: &blk, e: E, v: vt) { for b.node.view_items.each |vi| { (v.visit_view_item)(*vi, e, v); } @@ -429,10 +436,12 @@ pub fn visit_stmt(s: @stmt, e: E, v: vt) { pub fn visit_decl(d: @decl, e: E, v: vt) { match d.node { - decl_local(locs) => for locs.each |loc| { - (v.visit_local)(*loc, e, v) - }, - decl_item(it) => (v.visit_item)(it, e, v) + decl_local(ref locs) => { + for locs.each |loc| { + (v.visit_local)(*loc, e, v) + } + }, + decl_item(it) => (v.visit_item)(it, e, v) } } @@ -440,7 +449,7 @@ pub fn visit_expr_opt(eo: Option<@expr>, e: E, v: vt) { match eo { None => (), Some(ex) => (v.visit_expr)(ex, e, v) } } -pub fn visit_exprs(exprs: ~[@expr], e: E, v: vt) { +pub fn visit_exprs(exprs: &[@expr], e: E, v: vt) { for exprs.each |ex| { (v.visit_expr)(*ex, e, v); } } @@ -450,112 +459,121 @@ pub fn visit_mac(_m: mac, _e: E, _v: vt) { pub fn visit_expr(ex: @expr, e: E, v: vt) { match ex.node { - expr_vstore(x, _) => (v.visit_expr)(x, e, v), - expr_vec(es, _) => visit_exprs(es, e, v), - expr_repeat(element, count, _) => { - (v.visit_expr)(element, e, v); - (v.visit_expr)(count, e, v); - } - expr_rec(ref flds, base) => { - for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); } - visit_expr_opt(base, e, v); - } - expr_struct(p, ref flds, base) => { - visit_path(p, e, v); - for (*flds).each |f| { (v.visit_expr)(f.node.expr, e, v); } - visit_expr_opt(base, e, v); - } - expr_tup(elts) => for elts.each |el| { (v.visit_expr)(*el, e, v); }, - expr_call(callee, args, _) => { - visit_exprs(args, e, v); - (v.visit_expr)(callee, e, v); - } - expr_method_call(callee, _, tys, args, _) => { - visit_exprs(args, e, v); - for tys.each |tp| { (v.visit_ty)(*tp, e, v); } - (v.visit_expr)(callee, e, v); - } - expr_binary(_, a, b) => { - (v.visit_expr)(a, e, v); (v.visit_expr)(b, e, v); - } - expr_addr_of(_, x) | expr_unary(_, x) | - expr_loop_body(x) | expr_do_body(x) | - expr_assert(x) => (v.visit_expr)(x, e, v), - expr_lit(_) => (), - expr_cast(x, t) => { (v.visit_expr)(x, e, v); (v.visit_ty)(t, e, v); } - expr_if(x, ref b, eo) => { - (v.visit_expr)(x, e, v); - (v.visit_block)((*b), e, v); - visit_expr_opt(eo, e, v); - } - expr_while(x, ref b) => { - (v.visit_expr)(x, e, v); - (v.visit_block)((*b), e, v); - } - expr_loop(ref b, _) => (v.visit_block)((*b), e, v), - expr_match(x, ref arms) => { - (v.visit_expr)(x, e, v); - for (*arms).each |a| { (v.visit_arm)(*a, e, v); } - } - expr_fn(proto, ref decl, ref body, _) => { - (v.visit_fn)( - &fk_anon(proto), - /* FIXME (#2543) */ copy *decl, - *body, - ex.span, - ex.id, - e, - v - ); - } - expr_fn_block(ref decl, ref body) => { - (v.visit_fn)( - &fk_fn_block, - /* FIXME (#2543) */ copy *decl, - *body, - ex.span, - ex.id, - e, - v - ); - } - expr_block(ref b) => (v.visit_block)((*b), e, v), - expr_assign(a, b) => { - (v.visit_expr)(b, e, v); - (v.visit_expr)(a, e, v); - } - expr_copy(a) => (v.visit_expr)(a, e, v), - expr_swap(a, b) => { (v.visit_expr)(a, e, v); (v.visit_expr)(b, e, v); } - expr_assign_op(_, a, b) => { - (v.visit_expr)(b, e, v); - (v.visit_expr)(a, e, v); - } - expr_field(x, _, tys) => { - (v.visit_expr)(x, e, v); - for tys.each |tp| { (v.visit_ty)(*tp, e, v); } - } - expr_index(a, b) => { - (v.visit_expr)(a, e, v); - (v.visit_expr)(b, e, v); - } - expr_path(p) => visit_path(p, e, v), - expr_break(_) => (), - expr_again(_) => (), - expr_ret(eo) => 
visit_expr_opt(eo, e, v), - expr_log(_, lv, x) => { - (v.visit_expr)(lv, e, v); - (v.visit_expr)(x, e, v); - } - expr_mac(ref mac) => visit_mac((*mac), e, v), - expr_paren(x) => (v.visit_expr)(x, e, v), + expr_vstore(x, _) => (v.visit_expr)(x, e, v), + expr_vec(ref es, _) => visit_exprs(*es, e, v), + expr_repeat(element, count, _) => { + (v.visit_expr)(element, e, v); + (v.visit_expr)(count, e, v); + } + expr_rec(ref flds, base) => { + for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); } + visit_expr_opt(base, e, v); + } + expr_struct(p, ref flds, base) => { + visit_path(p, e, v); + for flds.each |f| { (v.visit_expr)(f.node.expr, e, v); } + visit_expr_opt(base, e, v); + } + expr_tup(ref elts) => { + for elts.each |el| { (v.visit_expr)(*el, e, v) } + } + expr_call(callee, ref args, _) => { + visit_exprs(*args, e, v); + (v.visit_expr)(callee, e, v); + } + expr_method_call(callee, _, ref tys, ref args, _) => { + visit_exprs(*args, e, v); + for tys.each |tp| { (v.visit_ty)(*tp, e, v); } + (v.visit_expr)(callee, e, v); + } + expr_binary(_, a, b) => { + (v.visit_expr)(a, e, v); + (v.visit_expr)(b, e, v); + } + expr_addr_of(_, x) | expr_unary(_, x) | + expr_loop_body(x) | expr_do_body(x) | + expr_assert(x) => (v.visit_expr)(x, e, v), + expr_lit(_) => (), + expr_cast(x, t) => { + (v.visit_expr)(x, e, v); + (v.visit_ty)(t, e, v); + } + expr_if(x, ref b, eo) => { + (v.visit_expr)(x, e, v); + (v.visit_block)(b, e, v); + visit_expr_opt(eo, e, v); + } + expr_while(x, ref b) => { + (v.visit_expr)(x, e, v); + (v.visit_block)(b, e, v); + } + expr_loop(ref b, _) => (v.visit_block)(b, e, v), + expr_match(x, ref arms) => { + (v.visit_expr)(x, e, v); + for arms.each |a| { (v.visit_arm)(a, e, v); } + } + expr_fn(proto, ref decl, ref body, _) => { + (v.visit_fn)( + &fk_anon(proto), + decl, + body, + ex.span, + ex.id, + e, + v + ); + } + expr_fn_block(ref decl, ref body) => { + (v.visit_fn)( + &fk_fn_block, + decl, + body, + ex.span, + ex.id, + e, + v + ); + } + expr_block(ref b) => (v.visit_block)(b, e, v), + expr_assign(a, b) => { + (v.visit_expr)(b, e, v); + (v.visit_expr)(a, e, v); + } + expr_copy(a) => (v.visit_expr)(a, e, v), + expr_swap(a, b) => { + (v.visit_expr)(a, e, v); + (v.visit_expr)(b, e, v); + } + expr_assign_op(_, a, b) => { + (v.visit_expr)(b, e, v); + (v.visit_expr)(a, e, v); + } + expr_field(x, _, ref tys) => { + (v.visit_expr)(x, e, v); + for tys.each |tp| { (v.visit_ty)(*tp, e, v); } + } + expr_index(a, b) => { + (v.visit_expr)(a, e, v); + (v.visit_expr)(b, e, v); + } + expr_path(p) => visit_path(p, e, v), + expr_break(_) => (), + expr_again(_) => (), + expr_ret(eo) => visit_expr_opt(eo, e, v), + expr_log(_, lv, x) => { + (v.visit_expr)(lv, e, v); + (v.visit_expr)(x, e, v); + } + expr_mac(ref mac) => visit_mac((*mac), e, v), + expr_paren(x) => (v.visit_expr)(x, e, v), } (v.visit_expr_post)(ex, e, v); } -pub fn visit_arm(a: arm, e: E, v: vt) { +pub fn visit_arm(a: &arm, e: E, v: vt) { for a.pats.each |p| { (v.visit_pat)(*p, e, v); } visit_expr_opt(a.guard, e, v); - (v.visit_block)(a.body, e, v); + (v.visit_block)(&a.body, e, v); } // Simpler, non-context passing interface. 
Always walks the whole tree, simply @@ -567,18 +585,18 @@ pub struct SimpleVisitor { visit_foreign_item: fn@(@foreign_item), visit_item: fn@(@item), visit_local: fn@(@local), - visit_block: fn@(ast::blk), + visit_block: fn@(&blk), visit_stmt: fn@(@stmt), - visit_arm: fn@(arm), + visit_arm: fn@(&arm), visit_pat: fn@(@pat), visit_decl: fn@(@decl), visit_expr: fn@(@expr), visit_expr_post: fn@(@expr), visit_ty: fn@(@Ty), visit_ty_params: fn@(&[ty_param]), - visit_fn: fn@(&fn_kind, fn_decl, blk, span, node_id), - visit_ty_method: fn@(ty_method), - visit_trait_method: fn@(trait_method), + visit_fn: fn@(&fn_kind, &fn_decl, &blk, span, node_id), + visit_ty_method: fn@(&ty_method), + visit_trait_method: fn@(&trait_method), visit_struct_def: fn@(@struct_def, ident, &[ty_param], node_id), visit_struct_field: fn@(@struct_field), visit_struct_method: fn@(@method) @@ -604,10 +622,10 @@ pub fn default_simple_visitor() -> @SimpleVisitor { visit_expr_post: |_e| { }, visit_ty: simple_ignore_ty, visit_ty_params: fn@(_ps: &[ty_param]) { }, - visit_fn: fn@(_fk: &fn_kind, _d: fn_decl, _b: blk, _sp: span, + visit_fn: fn@(_fk: &fn_kind, _d: &fn_decl, _b: &blk, _sp: span, _id: node_id) { }, - visit_ty_method: fn@(_m: ty_method) { }, - visit_trait_method: fn@(_m: trait_method) { }, + visit_ty_method: fn@(_m: &ty_method) { }, + visit_trait_method: fn@(_m: &trait_method) { }, visit_struct_def: fn@(_sd: @struct_def, _nm: ident, _tps: &[ty_param], _id: node_id) { }, visit_struct_field: fn@(_f: @struct_field) { }, @@ -644,7 +662,7 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(l); visit_local(l, e, v); } - fn v_block(f: fn@(ast::blk), bl: ast::blk, &&e: (), v: vt<()>) { + fn v_block(f: fn@(&blk), bl: &blk, &&e: (), v: vt<()>) { f(bl); visit_block(bl, e, v); } @@ -652,7 +670,7 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(st); visit_stmt(st, e, v); } - fn v_arm(f: fn@(arm), a: arm, &&e: (), v: vt<()>) { + fn v_arm(f: fn@(&arm), a: &arm, &&e: (), v: vt<()>) { f(a); visit_arm(a, e, v); } @@ -675,11 +693,11 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(ty); visit_ty(ty, e, v); } - fn v_ty_method(f: fn@(ty_method), ty: ty_method, &&e: (), v: vt<()>) { + fn v_ty_method(f: fn@(&ty_method), ty: &ty_method, &&e: (), v: vt<()>) { f(ty); visit_ty_method(ty, e, v); } - fn v_trait_method(f: fn@(trait_method), m: trait_method, &&e: (), + fn v_trait_method(f: fn@(&trait_method), m: &trait_method, &&e: (), v: vt<()>) { f(m); visit_trait_method(m, e, v); @@ -697,9 +715,16 @@ pub fn mk_simple_visitor(v: simple_visitor) -> vt<()> { f(ps); visit_ty_params(ps, e, v); } - fn v_fn(f: fn@(&fn_kind, fn_decl, blk, span, node_id), - fk: &fn_kind, decl: fn_decl, body: blk, sp: span, - id: node_id, &&e: (), v: vt<()>) { + fn v_fn( + f: fn@(&fn_kind, &fn_decl, &blk, span, node_id), + fk: &fn_kind, + decl: &fn_decl, + body: &blk, + sp: span, + id: node_id, + &&e: (), + v: vt<()> + ) { f(fk, decl, body, sp, id); visit_fn(fk, decl, body, sp, id, e, v); } From d20438695e133586d230e7e687473ad39acc52d3 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 09:06:13 -0800 Subject: [PATCH 06/30] libsyntax: make lexer vecs_implicitly_copyable-free --- src/libsyntax/parse/lexer.rs | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 92c4f1e828f62..54fdcc647ea41 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -103,8 +103,8 @@ fn dup_string_reader(r: @mut 
StringReader) -> @mut StringReader { curr: r.curr, filemap: r.filemap, interner: r.interner, - peek_tok: r.peek_tok, - peek_span: r.peek_span + peek_tok: copy r.peek_tok, + peek_span: copy r.peek_span } } @@ -112,9 +112,12 @@ impl reader for StringReader { fn is_eof(@mut self) -> bool { is_eof(self) } // return the next token. EFFECT: advances the string_reader. fn next_token(@mut self) -> TokenAndSpan { - let ret_val = TokenAndSpan {tok: self.peek_tok, sp: self.peek_span}; + let ret_val = TokenAndSpan { + tok: copy self.peek_tok, + sp: copy self.peek_span, + }; string_advance_token(self); - return ret_val; + ret_val } fn fatal(@mut self, m: ~str) -> ! { self.span_diagnostic.span_fatal(copy self.peek_span, m) @@ -122,7 +125,10 @@ impl reader for StringReader { fn span_diag(@mut self) -> span_handler { self.span_diagnostic } pure fn interner(@mut self) -> @token::ident_interner { self.interner } fn peek(@mut self) -> TokenAndSpan { - TokenAndSpan {tok: self.peek_tok, sp: self.peek_span} + TokenAndSpan { + tok: copy self.peek_tok, + sp: copy self.peek_span, + } } fn dup(@mut self) -> reader { dup_string_reader(self) as reader } } @@ -136,7 +142,10 @@ pub impl reader for TtReader { fn span_diag(@mut self) -> span_handler { self.sp_diag } pure fn interner(@mut self) -> @token::ident_interner { self.interner } fn peek(@mut self) -> TokenAndSpan { - TokenAndSpan { tok: self.cur_tok, sp: self.cur_span } + TokenAndSpan { + tok: copy self.cur_tok, + sp: copy self.cur_span, + } } fn dup(@mut self) -> reader { dup_tt_reader(self) as reader } } @@ -145,8 +154,8 @@ pub impl reader for TtReader { fn string_advance_token(r: @mut StringReader) { match (consume_whitespace_and_comments(r)) { Some(comment) => { - r.peek_tok = comment.tok; - r.peek_span = comment.sp; + r.peek_tok = copy comment.tok; + r.peek_span = copy comment.sp; }, None => { if is_eof(r) { From 8b94ef0302c898b0643810c9a31978684b607a37 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Wed, 20 Feb 2013 21:04:05 -0800 Subject: [PATCH 07/30] libsyntax: fix the span in parse_bottom_expr's INTERPOLATED handler --- src/libsyntax/parse/parser.rs | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 9bac163dab6ef..69790e0077934 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -127,18 +127,23 @@ enum view_item_parse_mode { The important thing is to make sure that lookahead doesn't balk at INTERPOLATED tokens */ macro_rules! maybe_whole_expr ( - ($p:expr) => ( match *$p.token { - INTERPOLATED(token::nt_expr(e)) => { - $p.bump(); - return e; - } - INTERPOLATED(token::nt_path(pt)) => { - $p.bump(); - return $p.mk_expr($p.span.lo, $p.span.lo, - expr_path(pt)); - } - _ => () - }) + ($p:expr) => ( + match *$p.token { + INTERPOLATED(token::nt_expr(e)) => { + $p.bump(); + return e; + } + INTERPOLATED(token::nt_path(pt)) => { + $p.bump(); + return $p.mk_expr( + $p.span.lo, + $p.span.hi, + expr_path(pt) + ); + } + _ => () + } + ) ) macro_rules! 
maybe_whole ( From 1deb858b2217cd9107bef4d64d35f1489f77f2ff Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 09:39:29 -0800 Subject: [PATCH 08/30] libsyntax: is_keyword should take a &~str --- src/libsyntax/ext/trace_macros.rs | 4 +- src/libsyntax/parse/common.rs | 42 +++--- src/libsyntax/parse/obsolete.rs | 4 +- src/libsyntax/parse/parser.rs | 210 +++++++++++++++--------------- 4 files changed, 132 insertions(+), 128 deletions(-) diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index 842727f092a29..fb7b41be2d670 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -31,9 +31,9 @@ pub fn expand_trace_macros(cx: ext_ctxt, sp: span, let rdr = tt_rdr as reader; let rust_parser = Parser(sess, cfg, rdr.dup()); - if rust_parser.is_keyword(~"true") { + if rust_parser.is_keyword(&~"true") { cx.set_trace_macros(true); - } else if rust_parser.is_keyword(~"false") { + } else if rust_parser.is_keyword(&~"false") { cx.set_trace_macros(false); } else { cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`") diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 57d62d628dc6f..0317ae14e8cc4 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -110,25 +110,25 @@ pub impl Parser { // Storing keywords as interned idents instead of strings would be nifty. // A sanity check that the word we are asking for is a known keyword - fn require_keyword(word: ~str) { - if !self.keywords.contains_key(&word) { - self.bug(fmt!("unknown keyword: %s", word)); + fn require_keyword(word: &~str) { + if !self.keywords.contains_key(word) { + self.bug(fmt!("unknown keyword: %s", *word)); } } - fn token_is_word(word: ~str, ++tok: token::Token) -> bool { + fn token_is_word(word: &~str, tok: token::Token) -> bool { match tok { - token::IDENT(sid, false) => { *self.id_to_str(sid) == word } + token::IDENT(sid, false) => { *self.id_to_str(sid) == *word } _ => { false } } } - fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool { + fn token_is_keyword(word: &~str, ++tok: token::Token) -> bool { self.require_keyword(word); self.token_is_word(word, tok) } - fn is_keyword(word: ~str) -> bool { + fn is_keyword(word: &~str) -> bool { self.token_is_keyword(word, *self.token) } @@ -141,62 +141,62 @@ pub impl Parser { } } - fn eat_keyword(word: ~str) -> bool { + fn eat_keyword(word: &~str) -> bool { self.require_keyword(word); let is_kw = match *self.token { - token::IDENT(sid, false) => (word == *self.id_to_str(sid)), + token::IDENT(sid, false) => *word == *self.id_to_str(sid), _ => false }; if is_kw { self.bump() } is_kw } - fn expect_keyword(word: ~str) { + fn expect_keyword(word: &~str) { self.require_keyword(word); if !self.eat_keyword(word) { - self.fatal(~"expected `" + word + ~"`, found `" + + self.fatal(~"expected `" + *word + ~"`, found `" + token_to_str(self.reader, *self.token) + ~"`"); } } - fn is_strict_keyword(word: ~str) -> bool { - self.strict_keywords.contains_key(&word) + fn is_strict_keyword(word: &~str) -> bool { + self.strict_keywords.contains_key(word) } fn check_strict_keywords() { match *self.token { token::IDENT(_, false) => { let w = token_to_str(self.reader, *self.token); - self.check_strict_keywords_(w); + self.check_strict_keywords_(&w); } _ => () } } - fn check_strict_keywords_(w: ~str) { + fn check_strict_keywords_(w: &~str) { if self.is_strict_keyword(w) { - self.fatal(~"found `" + w + ~"` in ident position"); + self.fatal(~"found `" + *w + ~"` in 
ident position"); } } - fn is_reserved_keyword(word: ~str) -> bool { - self.reserved_keywords.contains_key(&word) + fn is_reserved_keyword(word: &~str) -> bool { + self.reserved_keywords.contains_key(word) } fn check_reserved_keywords() { match *self.token { token::IDENT(_, false) => { let w = token_to_str(self.reader, *self.token); - self.check_reserved_keywords_(w); + self.check_reserved_keywords_(&w); } _ => () } } - fn check_reserved_keywords_(w: ~str) { + fn check_reserved_keywords_(w: &~str) { if self.is_reserved_keyword(w) { - self.fatal(~"`" + w + ~"` is a reserved keyword"); + self.fatal(~"`" + *w + ~"` is a reserved keyword"); } } diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 1ae8786e09bb2..02c2fb404c207 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -197,9 +197,9 @@ pub impl Parser { } fn try_parse_obsolete_priv_section() -> bool { - if self.is_keyword(~"priv") && self.look_ahead(1) == token::LBRACE { + if self.is_keyword(&~"priv") && self.look_ahead(1) == token::LBRACE { self.obsolete(*self.span, ObsoletePrivSection); - self.eat_keyword(~"priv"); + self.eat_keyword(&~"priv"); self.bump(); while *self.token != token::RBRACE { self.parse_single_class_item(ast::private); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 69790e0077934..ffc8a28a54583 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -310,10 +310,10 @@ pub impl Parser { pure fn id_to_str(id: ident) -> @~str { self.sess.interner.get(id) } fn token_is_closure_keyword(+tok: token::Token) -> bool { - self.token_is_keyword(~"pure", tok) || - self.token_is_keyword(~"unsafe", tok) || - self.token_is_keyword(~"once", tok) || - self.token_is_keyword(~"fn", tok) + self.token_is_keyword(&~"pure", tok) || + self.token_is_keyword(&~"unsafe", tok) || + self.token_is_keyword(&~"once", tok) || + self.token_is_keyword(&~"fn", tok) } fn parse_ty_bare_fn() -> ty_ @@ -333,7 +333,7 @@ pub impl Parser { */ let purity = self.parse_purity(); - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); return ty_bare_fn(@TyBareFn { abi: RustAbi, purity: purity, @@ -364,7 +364,7 @@ pub impl Parser { let purity = self.parse_purity(); let onceness = parse_onceness(&self); - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); let post_sigil = self.parse_fn_ty_sigil(); let sigil = match (pre_sigil, post_sigil) { @@ -392,14 +392,14 @@ pub impl Parser { }); fn parse_onceness(self: &Parser) -> Onceness { - if self.eat_keyword(~"once") {Once} else {Many} + if self.eat_keyword(&~"once") { Once } else { Many } } } fn parse_purity() -> purity { - if self.eat_keyword(~"pure") { + if self.eat_keyword(&~"pure") { return pure_fn; - } else if self.eat_keyword(~"unsafe") { + } else if self.eat_keyword(&~"unsafe") { return unsafe_fn; } else { return impure_fn; @@ -640,7 +640,7 @@ pub impl Parser { } else if *self.token == token::BINOP(token::AND) { self.bump(); self.parse_borrowed_pointee() - } else if self.eat_keyword(~"extern") { + } else if self.eat_keyword(&~"extern") { self.parse_ty_bare_fn() } else if self.token_is_closure_keyword(*self.token) { self.parse_ty_closure(None, None) @@ -648,7 +648,9 @@ pub impl Parser { || is_ident_or_path(*self.token) { let path = self.parse_path_with_tps(colons_before_params); ty_path(path, self.get_id()) - } else { self.fatal(~"expected type"); }; + } else { + self.fatal(~"expected type"); + }; let sp = mk_sp(lo, self.last_span.hi); @Ty {id: self.get_id(), node: t, span: sp} 
@@ -667,8 +669,10 @@ pub impl Parser { token::IDENT(rname, _) => { if self.look_ahead(1u) == token::BINOP(token::SLASH) && - self.token_is_closure_keyword(self.look_ahead(2u)) { - self.bump(); self.bump(); + self.token_is_closure_keyword(self.look_ahead(2u)) + { + self.bump(); + self.bump(); return self.parse_ty_closure(Some(sigil), Some(rname)); } else if self.token_is_closure_keyword(*self.token) { return self.parse_ty_closure(Some(sigil), None); @@ -756,7 +760,7 @@ pub impl Parser { fn parse_capture_item_or(parse_arg_fn: fn(Parser) -> arg_or_capture_item) -> arg_or_capture_item { - if self.eat_keyword(~"copy") { + if self.eat_keyword(&~"copy") { // XXX outdated syntax now that moves-based-on-type has gone in self.parse_ident(); either::Right(()) @@ -772,7 +776,7 @@ pub impl Parser { let mut is_mutbl = false; let pat = if require_name || self.is_named_argument() { m = self.parse_arg_mode(); - is_mutbl = self.eat_keyword(~"mut"); + is_mutbl = self.eat_keyword(&~"mut"); let pat = self.parse_pat(false); self.expect(token::COLON); pat @@ -800,7 +804,7 @@ pub impl Parser { fn parse_fn_block_arg() -> arg_or_capture_item { do self.parse_capture_item_or |p| { let m = p.parse_arg_mode(); - let is_mutbl = self.eat_keyword(~"mut"); + let is_mutbl = self.eat_keyword(&~"mut"); let pat = p.parse_pat(false); let t = if p.eat(token::COLON) { p.parse_ty(false) @@ -856,9 +860,9 @@ pub impl Parser { fn parse_lit() -> lit { let lo = self.span.lo; - let lit = if self.eat_keyword(~"true") { + let lit = if self.eat_keyword(&~"true") { lit_bool(true) - } else if self.eat_keyword(~"false") { + } else if self.eat_keyword(&~"false") { lit_bool(false) } else { // XXX: This is a really bad copy! @@ -1024,9 +1028,9 @@ pub impl Parser { } fn parse_mutability() -> mutability { - if self.eat_keyword(~"mut") { + if self.eat_keyword(&~"mut") { m_mutbl - } else if self.eat_keyword(~"const") { + } else if self.eat_keyword(&~"const") { m_const } else { m_imm @@ -1124,21 +1128,21 @@ pub impl Parser { } } else if token::is_bar(*self.token) { return self.parse_lambda_expr(); - } else if self.eat_keyword(~"if") { + } else if self.eat_keyword(&~"if") { return self.parse_if_expr(); - } else if self.eat_keyword(~"for") { + } else if self.eat_keyword(&~"for") { return self.parse_sugary_call_expr(~"for", ForSugar, expr_loop_body); - } else if self.eat_keyword(~"do") { + } else if self.eat_keyword(&~"do") { return self.parse_sugary_call_expr(~"do", DoSugar, expr_do_body); - } else if self.eat_keyword(~"while") { + } else if self.eat_keyword(&~"while") { return self.parse_while_expr(); - } else if self.eat_keyword(~"loop") { + } else if self.eat_keyword(&~"loop") { return self.parse_loop_expr(); - } else if self.eat_keyword(~"match") { + } else if self.eat_keyword(&~"match") { return self.parse_match_expr(); - } else if self.eat_keyword(~"fn") { + } else if self.eat_keyword(&~"fn") { let opt_sigil = self.parse_fn_ty_sigil(); let sigil = match opt_sigil { None => { @@ -1147,7 +1151,7 @@ pub impl Parser { Some(p) => { p } }; return self.parse_fn_expr(sigil); - } else if self.eat_keyword(~"unsafe") { + } else if self.eat_keyword(&~"unsafe") { return self.parse_block_expr(lo, unsafe_blk); } else if *self.token == token::LBRACKET { self.bump(); @@ -1182,7 +1186,7 @@ pub impl Parser { } } hi = self.span.hi; - } else if self.eat_keyword(~"log") { + } else if self.eat_keyword(&~"log") { self.expect(token::LPAREN); let lvl = self.parse_expr(); self.expect(token::COMMA); @@ -1190,30 +1194,30 @@ pub impl Parser { ex = expr_log(ast::log_other, 
lvl, e); hi = self.span.hi; self.expect(token::RPAREN); - } else if self.eat_keyword(~"assert") { + } else if self.eat_keyword(&~"assert") { let e = self.parse_expr(); ex = expr_assert(e); hi = e.span.hi; - } else if self.eat_keyword(~"return") { + } else if self.eat_keyword(&~"return") { if can_begin_expr(*self.token) { let e = self.parse_expr(); hi = e.span.hi; ex = expr_ret(Some(e)); } else { ex = expr_ret(None); } - } else if self.eat_keyword(~"break") { + } else if self.eat_keyword(&~"break") { if is_ident(*self.token) { ex = expr_break(Some(self.parse_ident())); } else { ex = expr_break(None); } hi = self.span.hi; - } else if self.eat_keyword(~"copy") { + } else if self.eat_keyword(&~"copy") { let e = self.parse_expr(); ex = expr_copy(e); hi = e.span.hi; } else if *self.token == token::MOD_SEP || - is_ident(*self.token) && !self.is_keyword(~"true") && - !self.is_keyword(~"false") { + is_ident(*self.token) && !self.is_keyword(&~"true") && + !self.is_keyword(&~"false") { let pth = self.parse_path_with_tps(true); /* `!`, as an operator, is prefix, so we know this isn't that */ @@ -1642,7 +1646,7 @@ pub impl Parser { } } None => { - if as_prec > min_prec && self.eat_keyword(~"as") { + if as_prec > min_prec && self.eat_keyword(&~"as") { let rhs = self.parse_ty(true); let _as = self.mk_expr(lhs.span.lo, rhs.span.hi, @@ -1714,7 +1718,7 @@ pub impl Parser { let thn = self.parse_block(); let mut els: Option<@expr> = None; let mut hi = thn.span.hi; - if self.eat_keyword(~"else") { + if self.eat_keyword(&~"else") { let elexpr = self.parse_else_expr(); els = Some(elexpr); hi = elexpr.span.hi; @@ -1788,7 +1792,7 @@ pub impl Parser { } fn parse_else_expr() -> @expr { - if self.eat_keyword(~"if") { + if self.eat_keyword(&~"if") { return self.parse_if_expr(); } else { let blk = self.parse_block(); @@ -1904,7 +1908,7 @@ pub impl Parser { fn looking_at_record_literal() -> bool { let lookahead = self.look_ahead(1); *self.token == token::LBRACE && - (self.token_is_keyword(~"mut", lookahead) || + (self.token_is_keyword(&~"mut", lookahead) || (is_plain_ident(lookahead) && self.look_ahead(2) == token::COLON)) } @@ -1945,7 +1949,7 @@ pub impl Parser { while *self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = None; - if self.eat_keyword(~"if") { guard = Some(self.parse_expr()); } + if self.eat_keyword(&~"if") { guard = Some(self.parse_expr()); } self.expect(token::FAT_ARROW); let expr = self.parse_expr_res(RESTRICT_STMT_EXPR); @@ -2210,8 +2214,8 @@ pub impl Parser { } copy tok => { if !is_ident_or_path(tok) - || self.is_keyword(~"true") - || self.is_keyword(~"false") + || self.is_keyword(&~"true") + || self.is_keyword(&~"false") { let val = self.parse_expr_res(RESTRICT_NO_BAR_OP); if self.eat(token::DOTDOT) { @@ -2220,10 +2224,10 @@ pub impl Parser { } else { pat = pat_lit(val); } - } else if self.eat_keyword(~"ref") { + } else if self.eat_keyword(&~"ref") { let mutbl = self.parse_mutability(); pat = self.parse_pat_ident(refutable, bind_by_ref(mutbl)); - } else if self.eat_keyword(~"copy") { + } else if self.eat_keyword(&~"copy") { pat = self.parse_pat_ident(refutable, bind_by_copy); } else { // XXX---refutable match bindings should work same as let @@ -2355,7 +2359,7 @@ pub impl Parser { } fn parse_let() -> @decl { - let is_mutbl = self.eat_keyword(~"mut"); + let is_mutbl = self.eat_keyword(&~"mut"); let lo = self.span.lo; let mut locals = ~[self.parse_local(is_mutbl, true)]; while self.eat(token::COMMA) { @@ -2368,7 +2372,7 @@ pub impl Parser { fn parse_instance_var(pr: visibility) 
-> @struct_field { let mut is_mutbl = struct_immutable; let lo = self.span.lo; - if self.eat_keyword(~"mut") { + if self.eat_keyword(&~"mut") { is_mutbl = struct_mutable; } if !is_plain_ident(*self.token) { @@ -2395,9 +2399,9 @@ pub impl Parser { } let lo = self.span.lo; - if self.is_keyword(~"let") { + if self.is_keyword(&~"let") { check_expected_item(self, first_item_attrs); - self.expect_keyword(~"let"); + self.expect_keyword(&~"let"); let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); } else if is_ident(*self.token) @@ -2491,7 +2495,7 @@ pub impl Parser { } let lo = self.span.lo; - if self.eat_keyword(~"unsafe") { + if self.eat_keyword(&~"unsafe") { self.obsolete(*self.span, ObsoleteUnsafeBlock); } self.expect(token::LBRACE); @@ -2629,9 +2633,9 @@ pub impl Parser { } fn parse_optional_purity() -> ast::purity { - if self.eat_keyword(~"pure") { + if self.eat_keyword(&~"pure") { ast::pure_fn - } else if self.eat_keyword(~"unsafe") { + } else if self.eat_keyword(&~"unsafe") { ast::unsafe_fn } else { ast::impure_fn @@ -2639,7 +2643,7 @@ pub impl Parser { } fn parse_optional_onceness() -> ast::Onceness { - if self.eat_keyword(~"once") { ast::Once } else { ast::Many } + if self.eat_keyword(&~"once") { ast::Once } else { ast::Many } } fn parse_optional_ty_param_bounds() -> @~[ty_param_bound] { @@ -2647,7 +2651,7 @@ pub impl Parser { if self.eat(token::COLON) { loop { if self.eat(token::BINOP(token::AND)) { - if self.eat_keyword(~"static") { + if self.eat_keyword(&~"static") { bounds.push(RegionTyParamBound); } else { self.span_err(*self.span, @@ -2760,10 +2764,10 @@ pub impl Parser { fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_, p: Parser) -> ast::self_ty_ { // We need to make sure it isn't a mode or a type - if p.token_is_keyword(~"self", p.look_ahead(1)) || - ((p.token_is_keyword(~"const", p.look_ahead(1)) || - p.token_is_keyword(~"mut", p.look_ahead(1))) && - p.token_is_keyword(~"self", p.look_ahead(2))) { + if p.token_is_keyword(&~"self", p.look_ahead(1)) || + ((p.token_is_keyword(&~"const", p.look_ahead(1)) || + p.token_is_keyword(&~"mut", p.look_ahead(1))) && + p.token_is_keyword(&~"self", p.look_ahead(2))) { p.bump(); let mutability = p.parse_mutability(); @@ -2975,7 +2979,7 @@ pub impl Parser { let mut ty = self.parse_ty(false); // Parse traits, if necessary. - let opt_trait = if self.eat_keyword(~"for") { + let opt_trait = if self.eat_keyword(&~"for") { // New-style trait. Reinterpret the type as a trait. 
let opt_trait_ref = match ty.node { ty_path(path, node_id) => { @@ -3184,11 +3188,11 @@ pub impl Parser { let attrs = self.parse_outer_attributes(); - if self.eat_keyword(~"priv") { + if self.eat_keyword(&~"priv") { return members(~[self.parse_single_class_item(private)]) } - if self.eat_keyword(~"pub") { + if self.eat_keyword(&~"pub") { return members(~[self.parse_single_class_item(public)]); } @@ -3196,7 +3200,7 @@ pub impl Parser { return members(~[]); } - if self.eat_keyword(~"drop") { + if self.eat_keyword(&~"drop") { return self.parse_dtor(attrs); } else { @@ -3205,12 +3209,12 @@ pub impl Parser { } fn parse_visibility() -> visibility { - if self.eat_keyword(~"pub") { public } - else if self.eat_keyword(~"priv") { private } + if self.eat_keyword(&~"pub") { public } + else if self.eat_keyword(&~"priv") { private } else { inherited } } fn parse_staticness() -> bool { - self.eat_keyword(~"static") + self.eat_keyword(&~"static") } // given a termination token and a vector of already-parsed @@ -3412,7 +3416,7 @@ pub impl Parser { fn parse_item_foreign_const(vis: ast::visibility, +attrs: ~[attribute]) -> @foreign_item { let lo = self.span.lo; - self.expect_keyword(~"const"); + self.expect_keyword(&~"const"); let ident = self.parse_ident(); self.expect(token::COLON); let ty = self.parse_ty(false); @@ -3427,12 +3431,12 @@ pub impl Parser { } fn parse_fn_purity() -> purity { - if self.eat_keyword(~"fn") { impure_fn } - else if self.eat_keyword(~"pure") { - self.expect_keyword(~"fn"); + if self.eat_keyword(&~"fn") { impure_fn } + else if self.eat_keyword(&~"pure") { + self.expect_keyword(&~"fn"); pure_fn - } else if self.eat_keyword(~"unsafe") { - self.expect_keyword(~"fn"); + } else if self.eat_keyword(&~"unsafe") { + self.expect_keyword(&~"fn"); unsafe_fn } else { self.unexpected(); } @@ -3440,7 +3444,7 @@ pub impl Parser { fn parse_foreign_item(+attrs: ~[attribute]) -> @foreign_item { let vis = self.parse_visibility(); - if self.is_keyword(~"const") { + if self.is_keyword(&~"const") { self.parse_item_foreign_const(vis, attrs) } else { self.parse_item_foreign_fn(attrs) @@ -3496,9 +3500,9 @@ pub impl Parser { } let mut must_be_named_mod = false; - if self.is_keyword(~"mod") { + if self.is_keyword(&~"mod") { must_be_named_mod = true; - self.expect_keyword(~"mod"); + self.expect_keyword(&~"mod"); } else if *self.token != token::LBRACE { self.span_fatal(*self.span, fmt!("expected `{` or `mod` but found %s", @@ -3633,7 +3637,7 @@ pub impl Parser { let vlo = self.span.lo; // Is this a common field declaration? - if self.eat_keyword(~"struct") { + if self.eat_keyword(&~"struct") { if common_fields.is_some() { self.fatal(~"duplicate declaration of shared fields"); } @@ -3647,7 +3651,7 @@ pub impl Parser { // Is this a nested enum declaration? 
let ident, needs_comma, kind; let mut args = ~[], disr_expr = None; - if self.eat_keyword(~"enum") { + if self.eat_keyword(&~"enum") { ident = self.parse_ident(); self.expect(token::LBRACE); let nested_enum_def = self.parse_enum_def(ty_params); @@ -3778,57 +3782,57 @@ pub impl Parser { let lo = self.span.lo; let visibility; - if self.eat_keyword(~"pub") { + if self.eat_keyword(&~"pub") { visibility = public; - } else if self.eat_keyword(~"priv") { + } else if self.eat_keyword(&~"priv") { visibility = private; } else { visibility = inherited; } - if items_allowed && self.eat_keyword(~"const") { + if items_allowed && self.eat_keyword(&~"const") { // CONST ITEM let (ident, item_, extra_attrs) = self.parse_item_const(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if foreign_items_allowed && self.is_keyword(~"const") { + } else if foreign_items_allowed && self.is_keyword(&~"const") { // FOREIGN CONST ITEM let item = self.parse_item_foreign_const(visibility, attrs); return iovi_foreign_item(item); } else if items_allowed && // FUNCTION ITEM (not sure about lookahead condition...) - self.is_keyword(~"fn") && + self.is_keyword(&~"fn") && !self.fn_expr_lookahead(self.look_ahead(1u)) { self.bump(); let (ident, item_, extra_attrs) = self.parse_item_fn(impure_fn); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"pure") { + } else if items_allowed && self.eat_keyword(&~"pure") { // PURE FUNCTION ITEM - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); let (ident, item_, extra_attrs) = self.parse_item_fn(pure_fn); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); } else if foreign_items_allowed && - (self.is_keyword(~"fn") || self.is_keyword(~"pure") || - self.is_keyword(~"unsafe")) { + (self.is_keyword(&~"fn") || self.is_keyword(&~"pure") || + self.is_keyword(&~"unsafe")) { // FOREIGN FUNCTION ITEM (no items allowed) let item = self.parse_item_foreign_fn(attrs); return iovi_foreign_item(item); - } else if items_allowed && self.is_keyword(~"unsafe") + } else if items_allowed && self.is_keyword(&~"unsafe") && self.look_ahead(1u) != token::LBRACE { // UNSAFE FUNCTION ITEM (where items are allowed) self.bump(); - self.expect_keyword(~"fn"); + self.expect_keyword(&~"fn"); let (ident, item_, extra_attrs) = self.parse_item_fn(unsafe_fn); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if self.eat_keyword(~"extern") { - if items_allowed && self.eat_keyword(~"fn") { + } else if self.eat_keyword(&~"extern") { + if items_allowed && self.eat_keyword(&~"fn") { // EXTERN FUNCTION ITEM let (ident, item_, extra_attrs) = self.parse_item_fn(extern_fn); @@ -3840,43 +3844,43 @@ pub impl Parser { // EXTERN MODULE ITEM return self.parse_item_foreign_mod(lo, visibility, attrs, items_allowed); - } else if items_allowed && self.eat_keyword(~"mod") { + } else if items_allowed && self.eat_keyword(&~"mod") { // MODULE ITEM let (ident, item_, extra_attrs) = self.parse_item_mod(attrs); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"type") { + } else if items_allowed && self.eat_keyword(&~"type") { // TYPE ITEM let (ident, item_, extra_attrs) = self.parse_item_type(); return 
iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"enum") { + } else if items_allowed && self.eat_keyword(&~"enum") { // ENUM ITEM let (ident, item_, extra_attrs) = self.parse_item_enum(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"trait") { + } else if items_allowed && self.eat_keyword(&~"trait") { // TRAIT ITEM let (ident, item_, extra_attrs) = self.parse_item_trait(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"impl") { + } else if items_allowed && self.eat_keyword(&~"impl") { // IMPL ITEM let (ident, item_, extra_attrs) = self.parse_item_impl(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if items_allowed && self.eat_keyword(~"struct") { + } else if items_allowed && self.eat_keyword(&~"struct") { // STRUCT ITEM let (ident, item_, extra_attrs) = self.parse_item_struct(); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); - } else if self.eat_keyword(~"use") { + } else if self.eat_keyword(&~"use") { // USE ITEM let view_item = self.parse_use(); self.expect(token::SEMI); @@ -3959,7 +3963,7 @@ pub impl Parser { let lo = self.span.lo; let namespace; - if self.eat_keyword(~"mod") { + if self.eat_keyword(&~"mod") { namespace = module_ns; } else { namespace = type_value_ns; @@ -4053,25 +4057,25 @@ pub impl Parser { fn is_view_item() -> bool { let tok, next_tok; - if !self.is_keyword(~"pub") && !self.is_keyword(~"priv") { + if !self.is_keyword(&~"pub") && !self.is_keyword(&~"priv") { tok = *self.token; next_tok = self.look_ahead(1); } else { tok = self.look_ahead(1); next_tok = self.look_ahead(2); }; - self.token_is_keyword(~"use", tok) - || (self.token_is_keyword(~"extern", tok) && - self.token_is_keyword(~"mod", next_tok)) + self.token_is_keyword(&~"use", tok) + || (self.token_is_keyword(&~"extern", tok) && + self.token_is_keyword(&~"mod", next_tok)) } // parse a view item. 
fn parse_view_item(+attrs: ~[attribute], vis: visibility) -> @view_item { let lo = self.span.lo; - let node = if self.eat_keyword(~"use") { + let node = if self.eat_keyword(&~"use") { self.parse_use() - } else if self.eat_keyword(~"extern") { - self.expect_keyword(~"mod"); + } else if self.eat_keyword(&~"extern") { + self.expect_keyword(&~"mod"); let ident = self.parse_ident(); let metadata = self.parse_optional_meta(); view_item_extern_mod(ident, metadata, self.get_id()) From 4650da5888427b60c6d38308bcdd76da40f1b181 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 09:54:41 -0800 Subject: [PATCH 09/30] libsyntax: change eat to take a &token --- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/parse/common.rs | 4 +- src/libsyntax/parse/parser.rs | 76 +++++++++++++++++------------------ 3 files changed, 41 insertions(+), 41 deletions(-) diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 8d99c43d43d20..b7641e8b19b9a 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -341,7 +341,7 @@ pub fn get_exprs_from_tts(cx: ext_ctxt, tts: &[ast::token_tree]) let mut es = ~[]; while *p.token != token::EOF { if es.len() != 0 { - p.eat(token::COMMA); + p.eat(&token::COMMA); } es.push(p.parse_expr()); } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 0317ae14e8cc4..e336b2bb81465 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -103,8 +103,8 @@ pub impl Parser { // consume token 'tok' if it exists. Returns true if the given // token was present, false otherwise. - fn eat(tok: token::Token) -> bool { - return if *self.token == tok { self.bump(); true } else { false }; + fn eat(tok: &token::Token) -> bool { + return if *self.token == *tok { self.bump(); true } else { false }; } // Storing keywords as interned idents instead of strings would be nifty. 
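For readers following the series: the change to `eat` above is the usual by-value to by-reference migration, so call sites such as `p.eat(&token::COMMA)` borrow the token instead of handing over a copy. A minimal, self-contained sketch of that pattern, written in present-day Rust with stand-in `Token` and `Parser` types rather than the libsyntax definitions:

    // Illustration only: `Token` and `Parser` are simplified stand-ins.
    #[derive(PartialEq)]
    enum Token { Comma, Eof, Ident(String) }

    struct Parser { tokens: Vec<Token>, pos: usize }

    impl Parser {
        fn current(&self) -> &Token { &self.tokens[self.pos] }
        fn bump(&mut self) { self.pos += 1; }

        // Before this patch the parameter was an owned `Token`, forcing every
        // call site to move or copy a token just to compare against it; taking
        // `&Token` lets callers write `p.eat(&Token::Comma)` instead.
        fn eat(&mut self, tok: &Token) -> bool {
            if self.current() == tok { self.bump(); true } else { false }
        }
    }

    fn main() {
        let mut p = Parser {
            tokens: vec![Token::Ident("x".into()), Token::Comma, Token::Eof],
            pos: 0,
        };
        p.bump();                       // move past the identifier
        assert!(p.eat(&Token::Comma));  // call site passes a borrow
        assert!(!p.eat(&Token::Comma)); // comma already consumed
    }
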
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index ffc8a28a54583..a6834d1b88650 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -417,7 +417,7 @@ pub impl Parser { Lifetimes */ - if self.eat(token::LT) { + if self.eat(&token::LT) { let _lifetimes = self.parse_lifetimes(); self.expect(token::GT); } @@ -525,9 +525,9 @@ pub impl Parser { } fn parse_ret_ty() -> (ret_style, @Ty) { - return if self.eat(token::RARROW) { + return if self.eat(&token::RARROW) { let lo = self.span.lo; - if self.eat(token::NOT) { + if self.eat(&token::NOT) { ( noreturn, @Ty { @@ -719,12 +719,12 @@ pub impl Parser { } fn parse_arg_mode() -> mode { - if self.eat(token::BINOP(token::MINUS)) { + if self.eat(&token::BINOP(token::MINUS)) { expl(by_copy) // NDM outdated syntax - } else if self.eat(token::ANDAND) { + } else if self.eat(&token::ANDAND) { expl(by_ref) - } else if self.eat(token::BINOP(token::PLUS)) { - if self.eat(token::BINOP(token::PLUS)) { + } else if self.eat(&token::BINOP(token::PLUS)) { + if self.eat(&token::BINOP(token::PLUS)) { expl(by_val) } else { expl(by_copy) @@ -806,7 +806,7 @@ pub impl Parser { let m = p.parse_arg_mode(); let is_mutbl = self.eat_keyword(&~"mut"); let pat = p.parse_pat(false); - let t = if p.eat(token::COLON) { + let t = if p.eat(&token::COLON) { p.parse_ty(false) } else { @Ty { @@ -826,7 +826,7 @@ pub impl Parser { } fn maybe_parse_fixed_vstore_with_star() -> Option { - if self.eat(token::BINOP(token::STAR)) { + if self.eat(&token::BINOP(token::STAR)) { match *self.token { token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => { self.bump(); @@ -884,7 +884,7 @@ pub impl Parser { maybe_whole!(self, nt_path); let lo = self.span.lo; - let global = self.eat(token::MOD_SEP); + let global = self.eat(&token::MOD_SEP); let mut ids = ~[]; loop { let is_not_last = @@ -917,7 +917,7 @@ pub impl Parser { maybe_whole!(self, nt_path); let lo = self.span.lo; let path = self.parse_path_without_tps(); - if colons && !self.eat(token::MOD_SEP) { + if colons && !self.eat(&token::MOD_SEP) { return path; } @@ -940,7 +940,7 @@ pub impl Parser { // Parse any lifetime or type parameters which may appear: let tps = { - if !self.eat(token::LT) { + if !self.eat(&token::LT) { ~[] } else { // First consume lifetimes. 
@@ -1251,7 +1251,7 @@ pub impl Parser { self.expect(token::COMMA); - if self.eat(token::DOTDOT) { + if self.eat(&token::DOTDOT) { base = Some(self.parse_expr()); break; } @@ -1302,12 +1302,12 @@ pub impl Parser { let mut hi; loop { // expr.f - if self.eat(token::DOT) { + if self.eat(&token::DOT) { match *self.token { token::IDENT(i, _) => { hi = self.span.hi; self.bump(); - let tys = if self.eat(token::MOD_SEP) { + let tys = if self.eat(&token::MOD_SEP) { self.expect(token::LT); self.parse_seq_to_gt(Some(token::COMMA), |p| p.parse_ty(false)) @@ -1960,7 +1960,7 @@ pub impl Parser { if require_comma { self.expect(token::COMMA); } else { - self.eat(token::COMMA); + self.eat(&token::COMMA); } let blk = codemap::spanned { @@ -2218,7 +2218,7 @@ pub impl Parser { || self.is_keyword(&~"false") { let val = self.parse_expr_res(RESTRICT_NO_BAR_OP); - if self.eat(token::DOTDOT) { + if self.eat(&token::DOTDOT) { let end = self.parse_expr_res(RESTRICT_NO_BAR_OP); pat = pat_range(val, end); } else { @@ -2246,7 +2246,7 @@ pub impl Parser { if is_plain_ident(*self.token) && cannot_be_enum_or_struct { let name = self.parse_value_path(); let sub; - if self.eat(token::AT) { + if self.eat(&token::AT) { sub = Some(self.parse_pat(refutable)); } else { sub = None; @@ -2315,7 +2315,7 @@ pub impl Parser { ~"expected identifier, found path"); } let name = self.parse_value_path(); - let sub = if self.eat(token::AT) { + let sub = if self.eat(&token::AT) { Some(self.parse_pat(refutable)) } else { None }; @@ -2343,7 +2343,7 @@ pub impl Parser { node: ty_infer, span: mk_sp(lo, lo), }; - if self.eat(token::COLON) { ty = self.parse_ty(false); } + if self.eat(&token::COLON) { ty = self.parse_ty(false); } let init = if allow_init { self.parse_initializer() } else { None }; @spanned( lo, @@ -2362,7 +2362,7 @@ pub impl Parser { let is_mutbl = self.eat_keyword(&~"mut"); let lo = self.span.lo; let mut locals = ~[self.parse_local(is_mutbl, true)]; - while self.eat(token::COMMA) { + while self.eat(&token::COMMA) { locals.push(self.parse_local(is_mutbl, true)); } return @spanned(lo, self.last_span.hi, decl_local(locals)); @@ -2648,9 +2648,9 @@ pub impl Parser { fn parse_optional_ty_param_bounds() -> @~[ty_param_bound] { let mut bounds = ~[]; - if self.eat(token::COLON) { + if self.eat(&token::COLON) { loop { - if self.eat(token::BINOP(token::AND)) { + if self.eat(&token::BINOP(token::AND)) { if self.eat_keyword(&~"static") { bounds.push(RegionTyParamBound); } else { @@ -2695,7 +2695,7 @@ pub impl Parser { break; } - if self.eat(token::BINOP(token::PLUS)) { + if self.eat(&token::BINOP(token::PLUS)) { loop; } @@ -2715,7 +2715,7 @@ pub impl Parser { } fn parse_ty_params() -> ~[ty_param] { - if self.eat(token::LT) { + if self.eat(&token::LT) { let _lifetimes = self.parse_lifetimes(); self.parse_seq_to_gt( Some(token::COMMA), @@ -2849,7 +2849,7 @@ pub impl Parser { fn parse_fn_block_decl() -> fn_decl { let inputs_captures = { - if self.eat(token::OROR) { + if self.eat(&token::OROR) { ~[] } else { self.parse_unspanned_seq( @@ -2858,7 +2858,7 @@ pub impl Parser { |p| p.parse_fn_block_arg()) } }; - let output = if self.eat(token::RARROW) { + let output = if self.eat(&token::RARROW) { self.parse_ty(false) } else { @Ty { id: self.get_id(), node: ty_infer, span: *self.span } @@ -2996,7 +2996,7 @@ pub impl Parser { ty = self.parse_ty(false); opt_trait_ref - } else if self.eat(token::COLON) { + } else if self.eat(&token::COLON) { self.obsolete(*self.span, ObsoleteImplSyntax); Some(self.parse_trait_ref()) } else { @@ -3004,9 +3004,9 @@ pub impl 
Parser { }; let mut meths = ~[]; - if !self.eat(token::SEMI) { + if !self.eat(&token::SEMI) { self.expect(token::LBRACE); - while !self.eat(token::RBRACE) { + while !self.eat(&token::RBRACE) { meths.push(self.parse_method()); } } @@ -3061,7 +3061,7 @@ pub impl Parser { let class_name = self.parse_value_ident(); self.parse_region_param(); let ty_params = self.parse_ty_params(); - if self.eat(token::COLON) { + if self.eat(&token::COLON) { self.obsolete(*self.span, ObsoleteClassTraits); let _ = self.parse_trait_ref_list(token::LBRACE); } @@ -3070,7 +3070,7 @@ pub impl Parser { let mut the_dtor: Option<(blk, ~[attribute], codemap::span)> = None; let is_tuple_like; - if self.eat(token::LBRACE) { + if self.eat(&token::LBRACE) { // It's a record-like struct. is_tuple_like = false; fields = ~[]; @@ -3113,7 +3113,7 @@ pub impl Parser { @spanned(lo, p.span.hi, struct_field_) }; self.expect(token::SEMI); - } else if self.eat(token::SEMI) { + } else if self.eat(&token::SEMI) { // It's a unit-like struct. is_tuple_like = true; fields = ~[]; @@ -3526,7 +3526,7 @@ pub impl Parser { }; // extern mod { ... } - if items_allowed && self.eat(token::LBRACE) { + if items_allowed && self.eat(&token::LBRACE) { let abi; match abi_opt { Some(found_abi) => abi = found_abi, @@ -3578,7 +3578,7 @@ pub impl Parser { } fn parse_region_param() { - if self.eat(token::BINOP(token::SLASH)) { + if self.eat(&token::BINOP(token::SLASH)) { self.expect(token::BINOP(token::AND)); } } @@ -3659,7 +3659,7 @@ pub impl Parser { needs_comma = false; } else { ident = self.parse_value_ident(); - if self.eat(token::LBRACE) { + if self.eat(&token::LBRACE) { // Parse a struct variant. all_nullary = false; kind = struct_variant_kind(self.parse_struct_def()); @@ -3676,7 +3676,7 @@ pub impl Parser { }); } kind = tuple_variant_kind(args); - } else if self.eat(token::EQ) { + } else if self.eat(&token::EQ) { have_disr = true; disr_expr = Some(self.parse_expr()); kind = tuple_variant_kind(args); @@ -3696,7 +3696,7 @@ pub impl Parser { }; variants.push(spanned(vlo, self.last_span.hi, vr)); - if needs_comma && !self.eat(token::COMMA) { break; } + if needs_comma && !self.eat(&token::COMMA) { break; } } self.expect(token::RBRACE); if (have_disr && !all_nullary) { From f3965829ffd1e2eb4e2b8c4c8f151379f504ed7f Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 10:00:25 -0800 Subject: [PATCH 10/30] libsyntax: change flip_delimiter to take a &token::Token --- src/libsyntax/parse/parser.rs | 8 ++++---- src/libsyntax/parse/token.rs | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index a6834d1b88650..969f5cdb436ec 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1228,7 +1228,7 @@ pub impl Parser { _ => self.fatal(~"expected open delimiter") }; - let ket = token::flip_delimiter(*self.token); + let ket = token::flip_delimiter(&*self.token); let tts = self.parse_unspanned_seq(*self.token, ket, seq_sep_none(), @@ -1433,7 +1433,7 @@ pub impl Parser { } token::LPAREN | token::LBRACE | token::LBRACKET => { // tjc: ?????? 
- let ket = token::flip_delimiter(*self.token); + let ket = token::flip_delimiter(&*self.token); tt_delim(vec::append( // the open delimiter: ~[parse_any_tt_tok(self)], @@ -1465,7 +1465,7 @@ pub impl Parser { token::LBRACE | token::LPAREN | token::LBRACKET => { self.parse_matcher_subseq(name_idx, *self.token, // tjc: not sure why we need a copy - token::flip_delimiter(*self.token)) + token::flip_delimiter(&*self.token)) } _ => self.fatal(~"expected open delimiter") } @@ -3915,7 +3915,7 @@ pub impl Parser { // eat a matched-delimiter token tree: let tts = match *self.token { token::LPAREN | token::LBRACE => { - let ket = token::flip_delimiter(*self.token); + let ket = token::flip_delimiter(&*self.token); self.parse_unspanned_seq(*self.token, ket, seq_sep_none(), |p| p.parse_token_tree()) diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index f145e433fa7c1..8e88ac1d52545 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -257,8 +257,8 @@ pub pure fn can_begin_expr(t: Token) -> bool { } /// what's the opposite delimiter? -pub fn flip_delimiter(t: token::Token) -> token::Token { - match t { +pub fn flip_delimiter(t: &token::Token) -> token::Token { + match *t { token::LPAREN => token::RPAREN, token::LBRACE => token::RBRACE, token::LBRACKET => token::RBRACKET, From 3635480b157389ce3e65bb5b9ccd0ced8e21e1c7 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 10:20:24 -0800 Subject: [PATCH 11/30] libsyntax: change expect to take &token::Token --- src/libsyntax/ext/pipes/parse_proto.rs | 4 +- src/libsyntax/parse/attr.rs | 6 +- src/libsyntax/parse/common.rs | 25 ++--- src/libsyntax/parse/parser.rs | 131 +++++++++++++------------ 4 files changed, 84 insertions(+), 82 deletions(-) diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index 66feb7cc753cf..07db67d3173b2 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -39,7 +39,7 @@ pub impl proto_parser for parser::Parser { let id = self.parse_ident(); let name = *self.interner.get(id); - self.expect(token::COLON); + self.expect(&token::COLON); let dir = match *self.token { token::IDENT(n, _) => self.interner.get(n), _ => fail!() @@ -79,7 +79,7 @@ pub impl proto_parser for parser::Parser { } else { ~[] }; - self.expect(token::RARROW); + self.expect(&token::RARROW); let next = match *self.token { token::IDENT(_, _) => { diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 87ecf6a95676b..e5487eaac38bf 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -61,15 +61,15 @@ impl parser_attr for Parser { fn parse_attribute(style: ast::attr_style) -> ast::attribute { let lo = self.span.lo; - self.expect(token::POUND); + self.expect(&token::POUND); return self.parse_attribute_naked(style, lo); } fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) -> ast::attribute { - self.expect(token::LBRACKET); + self.expect(&token::LBRACKET); let meta_item = self.parse_meta_item(); - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); let mut hi = self.span.hi; return spanned(lo, hi, ast::attribute_ { style: style, value: meta_item, diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index e336b2bb81465..aa4ffb7fc7ab3 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -64,15 +64,16 @@ pub impl Parser { // expect and consume the token t. Signal an error if // the next token is not t. 
- fn expect(t: token::Token) { - if *self.token == t { + fn expect(t: &token::Token) { + if *self.token == *t { self.bump(); } else { - let mut s: ~str = ~"expected `"; - s += token_to_str(self.reader, t); - s += ~"` but found `"; - s += token_to_str(self.reader, *self.token); - self.fatal(s + ~"`"); + self.fatal( + fmt!("expected `%s` but found `%s`", + token_to_str(self.reader, *t), + token_to_str(self.reader, *self.token) + ) + ) } } @@ -230,7 +231,7 @@ pub impl Parser { match sep { Some(ref t) => { if first { first = false; } - else { self.expect(*t); } + else { self.expect(t); } } _ => () } @@ -252,7 +253,7 @@ pub impl Parser { fn parse_seq_lt_gt(sep: Option, f: fn(Parser) -> T) -> spanned<~[T]> { let lo = self.span.lo; - self.expect(token::LT); + self.expect(&token::LT); let result = self.parse_seq_to_before_gt::(sep, f); let hi = self.span.hi; self.expect_gt(); @@ -280,7 +281,7 @@ pub impl Parser { match sep.sep { Some(ref t) => { if first { first = false; } - else { self.expect(*t); } + else { self.expect(t); } } _ => () } @@ -297,7 +298,7 @@ pub impl Parser { +ket: token::Token, sep: SeqSep, f: fn(Parser) -> T) -> ~[T] { - self.expect(bra); + self.expect(&bra); let result = self.parse_seq_to_before_end::(ket, sep, f); self.bump(); return result; @@ -308,7 +309,7 @@ pub impl Parser { fn parse_seq(bra: token::Token, ket: token::Token, sep: SeqSep, f: fn(Parser) -> T) -> spanned<~[T]> { let lo = self.span.lo; - self.expect(bra); + self.expect(&bra); let result = self.parse_seq_to_before_end::(ket, sep, f); let hi = self.span.hi; self.bump(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 969f5cdb436ec..da7a0e0e4d329 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -419,7 +419,7 @@ pub impl Parser { */ if self.eat(&token::LT) { let _lifetimes = self.parse_lifetimes(); - self.expect(token::GT); + self.expect(&token::GT); } let inputs = self.parse_unspanned_seq( token::LPAREN, token::RPAREN, @@ -512,7 +512,7 @@ pub impl Parser { let lo = self.span.lo; let mutbl = self.parse_mutability(); let id = self.parse_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); spanned( lo, @@ -565,7 +565,7 @@ pub impl Parser { // Parses something like "&x" fn parse_region() -> @region { - self.expect(token::BINOP(token::AND)); + self.expect(&token::BINOP(token::AND)); match *self.token { token::IDENT(sid, _) => { @@ -605,7 +605,7 @@ pub impl Parser { } let t = if ts.len() == 1 && !one_tuple { ts[0].node } else { ty_tup(ts) }; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); t } } else if *self.token == token::AT { @@ -627,7 +627,7 @@ pub impl Parser { } ty_rec(elems) } else if *self.token == token::LBRACKET { - self.expect(token::LBRACKET); + self.expect(&token::LBRACKET); let mt = self.parse_mt(); // Parse the `* 3` in `[ int * 3 ]` @@ -635,7 +635,7 @@ pub impl Parser { None => ty_vec(mt), Some(suffix) => ty_fixed_length_vec(mt, suffix) }; - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); t } else if *self.token == token::BINOP(token::AND) { self.bump(); @@ -778,7 +778,7 @@ pub impl Parser { m = self.parse_arg_mode(); is_mutbl = self.eat_keyword(&~"mut"); let pat = self.parse_pat(false); - self.expect(token::COLON); + self.expect(&token::COLON); pat } else { m = infer(self.get_id()); @@ -853,7 +853,7 @@ pub impl Parser { token::LIT_FLOAT_UNSUFFIXED(s) => lit_float_unsuffixed(self.id_to_str(s)), token::LIT_STR(s) => lit_str(self.id_to_str(s)), - token::LPAREN => { 
self.expect(token::RPAREN); lit_nil }, + token::LPAREN => { self.expect(&token::RPAREN); lit_nil }, _ => { self.unexpected_last(tok); } } } @@ -893,7 +893,7 @@ pub impl Parser { if is_not_last { ids.push(parse_ident(self)); - self.expect(token::MOD_SEP); + self.expect(&token::MOD_SEP); } else { ids.push(parse_last_ident(self)); break; @@ -931,7 +931,8 @@ pub impl Parser { // ought to and have to sort it out later. if *self.token == token::BINOP(token::SLASH) && self.look_ahead(1u) == token::BINOP(token::AND) { - self.expect(token::BINOP(token::SLASH)); + + self.expect(&token::BINOP(token::SLASH)); Some(self.parse_region()) } else { None @@ -1041,7 +1042,7 @@ pub impl Parser { let lo = self.span.lo; let m = self.parse_mutability(); let i = self.parse_ident(); - self.expect(sep); + self.expect(&sep); let e = self.parse_expr(); spanned(lo, e.span.hi, ast::field_ { mutbl: m, ident: i, expr: e }) } @@ -1108,7 +1109,7 @@ pub impl Parser { } } hi = self.span.hi; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); return if es.len() == 1 && !one_tuple { self.mk_expr(lo, self.span.hi, expr_paren(es[0])) @@ -1169,7 +1170,7 @@ pub impl Parser { self.bump(); self.bump(); let count = self.parse_expr(); - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); ex = expr_repeat(first_expr, count, mutbl); } else if *self.token == token::COMMA { // Vector with two or more elements. @@ -1181,19 +1182,19 @@ pub impl Parser { ex = expr_vec(~[first_expr] + remaining_exprs, mutbl); } else { // Vector with one element. - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); ex = expr_vec(~[first_expr], mutbl); } } hi = self.span.hi; } else if self.eat_keyword(&~"log") { - self.expect(token::LPAREN); + self.expect(&token::LPAREN); let lvl = self.parse_expr(); - self.expect(token::COMMA); + self.expect(&token::COMMA); let e = self.parse_expr(); ex = expr_log(ast::log_other, lvl, e); hi = self.span.hi; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); } else if self.eat_keyword(&~"assert") { let e = self.parse_expr(); ex = expr_assert(e); @@ -1249,7 +1250,7 @@ pub impl Parser { break; } - self.expect(token::COMMA); + self.expect(&token::COMMA); if self.eat(&token::DOTDOT) { base = Some(self.parse_expr()); @@ -1264,7 +1265,7 @@ pub impl Parser { } hi = pth.span.hi; - self.expect(token::RBRACE); + self.expect(&token::RBRACE); ex = expr_struct(pth, fields, base); return self.mk_expr(lo, hi, ex); } @@ -1282,7 +1283,7 @@ pub impl Parser { } fn parse_block_expr(lo: BytePos, blk_mode: blk_check_mode) -> @expr { - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let blk = self.parse_block_tail(lo, blk_mode); return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); } @@ -1308,7 +1309,7 @@ pub impl Parser { hi = self.span.hi; self.bump(); let tys = if self.eat(&token::MOD_SEP) { - self.expect(token::LT); + self.expect(&token::LT); self.parse_seq_to_gt(Some(token::COMMA), |p| p.parse_ty(false)) } else { @@ -1355,7 +1356,7 @@ pub impl Parser { self.bump(); let ix = self.parse_expr(); hi = ix.span.hi; - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); e = self.mk_expr(lo, hi, expr_index(e, ix)); } @@ -1480,7 +1481,7 @@ pub impl Parser { let mut ret_val = ~[]; let mut lparens = 0u; - self.expect(bra); + self.expect(&bra); while *self.token != ket || lparens > 0u { if *self.token == token::LPAREN { lparens += 1u; } @@ -1510,7 +1511,7 @@ pub impl Parser { match_seq(ms, sep, zerok, name_idx_lo, *name_idx) } else { let bound_to = self.parse_ident(); - 
self.expect(token::COLON); + self.expect(&token::COLON); let nt_name = self.parse_ident(); let m = match_nonterminal(bound_to, nt_name, *name_idx); *name_idx += 1u; @@ -1882,7 +1883,7 @@ pub impl Parser { let opt_ident; if is_labeled_loop_header { opt_ident = Some(self.parse_ident()); - self.expect(token::COLON); + self.expect(&token::COLON); } else { opt_ident = None; } @@ -1914,7 +1915,7 @@ pub impl Parser { } fn parse_record_literal() -> expr_ { - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let mut fields = ~[self.parse_field(token::COLON)]; let mut base = None; while *self.token != token::RBRACE { @@ -1929,14 +1930,14 @@ pub impl Parser { break; } - self.expect(token::COMMA); + self.expect(&token::COMMA); if *self.token == token::RBRACE { // record ends by an optional trailing comma break; } fields.push(self.parse_field(token::COLON)); } - self.expect(token::RBRACE); + self.expect(&token::RBRACE); self.warn(~"REC"); return expr_rec(fields, base); } @@ -1944,13 +1945,13 @@ pub impl Parser { fn parse_match_expr() -> @expr { let lo = self.last_span.lo; let discriminant = self.parse_expr(); - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let mut arms: ~[arm] = ~[]; while *self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = None; if self.eat_keyword(&~"if") { guard = Some(self.parse_expr()); } - self.expect(token::FAT_ARROW); + self.expect(&token::FAT_ARROW); let expr = self.parse_expr_res(RESTRICT_STMT_EXPR); let require_comma = @@ -1958,7 +1959,7 @@ pub impl Parser { && *self.token != token::RBRACE; if require_comma { - self.expect(token::COMMA); + self.expect(&token::COMMA); } else { self.eat(&token::COMMA); } @@ -2029,7 +2030,7 @@ pub impl Parser { while *self.token != token::RBRACKET { if first { first = false; } - else { self.expect(token::COMMA); } + else { self.expect(&token::COMMA); } let mut is_tail = false; if *self.token == token::DOTDOT { @@ -2061,7 +2062,7 @@ pub impl Parser { let mut first = true; while *self.token != token::RBRACE { if first { first = false; } - else { self.expect(token::COMMA); } + else { self.expect(&token::COMMA); } if *self.token == token::UNDERSCORE { self.bump(); @@ -2199,9 +2200,9 @@ pub impl Parser { fields.push(self.parse_pat(refutable)); } } - if fields.len() == 1 { self.expect(token::COMMA); } + if fields.len() == 1 { self.expect(&token::COMMA); } hi = self.span.hi; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); pat = pat_tup(fields); } } @@ -2209,7 +2210,7 @@ pub impl Parser { self.bump(); let (elements, tail) = self.parse_pat_vec_elements(refutable); hi = self.span.hi; - self.expect(token::RBRACKET); + self.expect(&token::RBRACKET); pat = ast::pat_vec(elements, tail); } copy tok => { @@ -2271,7 +2272,7 @@ pub impl Parser { // This is a "top constructor only" pat self.bump(); self.bump(); star_pat = true; - self.expect(token::RPAREN); + self.expect(&token::RPAREN); } _ => { args = self.parse_unspanned_seq( @@ -2379,7 +2380,7 @@ pub impl Parser { self.fatal(~"expected ident"); } let name = self.parse_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); @spanned(lo, self.last_span.hi, ast::struct_field_ { kind: named_field(name, is_mutbl, pr), @@ -2498,7 +2499,7 @@ pub impl Parser { if self.eat_keyword(&~"unsafe") { self.obsolete(*self.span, ObsoleteUnsafeBlock); } - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let (inner, next) = maybe_parse_inner_attrs_and_next(self, parse_attrs); return (inner, self.parse_block_tail_(lo, 
default_blk, next)); @@ -2603,7 +2604,7 @@ pub impl Parser { stmts.push(stmt); if classify::stmt_ends_with_semi(*stmt) { - self.expect(token::SEMI); + self.expect(&token::SEMI); } } } @@ -2778,7 +2779,7 @@ pub impl Parser { } } - self.expect(token::LPAREN); + self.expect(&token::LPAREN); // A bit of complexity and lookahead is needed here in order to to be // backwards compatible. @@ -2831,7 +2832,7 @@ pub impl Parser { parse_arg_fn); } - self.expect(token::RPAREN); + self.expect(&token::RPAREN); let hi = self.span.hi; @@ -3005,7 +3006,7 @@ pub impl Parser { let mut meths = ~[]; if !self.eat(&token::SEMI) { - self.expect(token::LBRACE); + self.expect(&token::LBRACE); while !self.eat(&token::RBRACE) { meths.push(self.parse_method()); } @@ -3112,7 +3113,7 @@ pub impl Parser { }; @spanned(lo, p.span.hi, struct_field_) }; - self.expect(token::SEMI); + self.expect(&token::SEMI); } else if self.eat(&token::SEMI) { // It's a unit-like struct. is_tuple_like = true; @@ -3270,11 +3271,11 @@ pub impl Parser { fn parse_item_const() -> item_info { let id = self.parse_value_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); - self.expect(token::EQ); + self.expect(&token::EQ); let e = self.parse_expr(); - self.expect(token::SEMI); + self.expect(&token::SEMI); (id, item_const(ty, e), None) } @@ -3288,10 +3289,10 @@ pub impl Parser { (id, m, Some(attrs)) } else { self.push_mod_path(id, outer_attrs); - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let (inner, next) = self.parse_inner_attrs_and_next(); let m = self.parse_mod_items(token::RBRACE, next); - self.expect(token::RBRACE); + self.expect(&token::RBRACE); self.pop_mod_path(); (id, item_mod(m), Some(inner)) }; @@ -3404,7 +3405,7 @@ pub impl Parser { let (ident, tps) = self.parse_fn_header(); let decl = self.parse_fn_decl(|p| p.parse_arg()); let mut hi = self.span.hi; - self.expect(token::SEMI); + self.expect(&token::SEMI); @ast::foreign_item { ident: ident, attrs: attrs, node: foreign_item_fn(decl, purity, tps), @@ -3418,10 +3419,10 @@ pub impl Parser { let lo = self.span.lo; self.expect_keyword(&~"const"); let ident = self.parse_ident(); - self.expect(token::COLON); + self.expect(&token::COLON); let ty = self.parse_ty(false); let hi = self.span.hi; - self.expect(token::SEMI); + self.expect(&token::SEMI); @ast::foreign_item { ident: ident, attrs: attrs, node: foreign_item_const(ty), @@ -3535,7 +3536,7 @@ pub impl Parser { let (inner, next) = self.parse_inner_attrs_and_next(); let m = self.parse_foreign_mod_items(sort, abi, next); - self.expect(token::RBRACE); + self.expect(&token::RBRACE); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_foreign_mod(m), visibility, @@ -3552,7 +3553,7 @@ pub impl Parser { // extern mod foo; let metadata = self.parse_optional_meta(); - self.expect(token::SEMI); + self.expect(&token::SEMI); iovi_view_item(@ast::view_item { node: view_item_extern_mod(ident, metadata, self.get_id()), attrs: attrs, @@ -3571,15 +3572,15 @@ pub impl Parser { let (_, ident) = self.parse_type_decl(); self.parse_region_param(); let tps = self.parse_ty_params(); - self.expect(token::EQ); + self.expect(&token::EQ); let ty = self.parse_ty(false); - self.expect(token::SEMI); + self.expect(&token::SEMI); (ident, item_ty(ty, tps), None) } fn parse_region_param() { if self.eat(&token::BINOP(token::SLASH)) { - self.expect(token::BINOP(token::AND)); + self.expect(&token::BINOP(token::AND)); } } @@ -3641,7 +3642,7 @@ pub impl Parser { if common_fields.is_some() { 
self.fatal(~"duplicate declaration of shared fields"); } - self.expect(token::LBRACE); + self.expect(&token::LBRACE); common_fields = Some(self.parse_struct_def()); loop; } @@ -3653,7 +3654,7 @@ pub impl Parser { let mut args = ~[], disr_expr = None; if self.eat_keyword(&~"enum") { ident = self.parse_ident(); - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let nested_enum_def = self.parse_enum_def(ty_params); kind = enum_variant_kind(nested_enum_def); needs_comma = false; @@ -3698,7 +3699,7 @@ pub impl Parser { if needs_comma && !self.eat(&token::COMMA) { break; } } - self.expect(token::RBRACE); + self.expect(&token::RBRACE); if (have_disr && !all_nullary) { self.fatal(~"discriminator values can only be used with a c-like \ enum"); @@ -3715,7 +3716,7 @@ pub impl Parser { if *self.token == token::EQ { self.bump(); let ty = self.parse_ty(false); - self.expect(token::SEMI); + self.expect(&token::SEMI); let variant = spanned(ty.span.lo, ty.span.hi, ast::variant_ { name: id, attrs: ~[], @@ -3737,7 +3738,7 @@ pub impl Parser { None ); } - self.expect(token::LBRACE); + self.expect(&token::LBRACE); let enum_definition = self.parse_enum_def(ty_params); (id, item_enum(enum_definition, ty_params), None) @@ -3883,7 +3884,7 @@ pub impl Parser { } else if self.eat_keyword(&~"use") { // USE ITEM let view_item = self.parse_use(); - self.expect(token::SEMI); + self.expect(&token::SEMI); return iovi_view_item(@ast::view_item { node: view_item, attrs: attrs, @@ -3902,7 +3903,7 @@ pub impl Parser { // item macro. let pth = self.parse_path_without_tps(); - self.expect(token::NOT); + self.expect(&token::NOT); // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax @@ -4082,7 +4083,7 @@ pub impl Parser { } else { self.bug(~"expected view item"); }; - self.expect(token::SEMI); + self.expect(&token::SEMI); @ast::view_item { node: node, attrs: attrs, vis: vis, From 272c25e9383be490c5fc67cb09773fe2563ef3a7 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 15:41:54 -0800 Subject: [PATCH 12/30] libsyntax: minor cleanup --- src/libsyntax/parse/attr.rs | 9 +- src/libsyntax/parse/common.rs | 133 ++++++++++++------- src/libsyntax/parse/parser.rs | 242 +++++++++++++++++++++------------- 3 files changed, 246 insertions(+), 138 deletions(-) diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index e5487eaac38bf..f7b115912da23 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -151,9 +151,12 @@ impl parser_attr for Parser { } fn parse_meta_seq() -> ~[@ast::meta_item] { - return self.parse_seq(token::LPAREN, token::RPAREN, - seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_meta_item()).node; + self.parse_seq( + token::LPAREN, + token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + |p| p.parse_meta_item() + ).node } fn parse_optional_meta() -> ~[@ast::meta_item] { diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index aa4ffb7fc7ab3..c7f029e9733db 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -29,20 +29,20 @@ pub struct SeqSep { pub fn seq_sep_trailing_disallowed(t: token::Token) -> SeqSep { SeqSep { - sep: option::Some(t), - trailing_sep_allowed: false + sep: Some(t), + trailing_sep_allowed: false, } } pub fn seq_sep_trailing_allowed(t: token::Token) -> SeqSep { SeqSep { - sep: option::Some(t), - trailing_sep_allowed: true + sep: Some(t), + trailing_sep_allowed: true, } } pub fn seq_sep_none() -> SeqSep { SeqSep { - 
sep: option::None, - trailing_sep_allowed: false + sep: None, + trailing_sep_allowed: false, } } @@ -54,12 +54,20 @@ pub impl Parser { fn unexpected_last(t: token::Token) -> ! { self.span_fatal( *self.last_span, - ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`"); + fmt!( + "unexpected token: `%s`", + token_to_str(self.reader, t) + ) + ); } fn unexpected() -> ! { - self.fatal(~"unexpected token: `" - + token_to_str(self.reader, *self.token) + ~"`"); + self.fatal( + fmt!( + "unexpected token: `%s`", + token_to_str(self.reader, *self.token) + ) + ); } // expect and consume the token t. Signal an error if @@ -81,12 +89,23 @@ pub impl Parser { self.check_strict_keywords(); self.check_reserved_keywords(); match *self.token { - token::IDENT(i, _) => { self.bump(); return i; } - token::INTERPOLATED(token::nt_ident(*)) => { self.bug( - ~"ident interpolation not converted to real token"); } - _ => { self.fatal(~"expected ident, found `" - + token_to_str(self.reader, *self.token) - + ~"`"); } + token::IDENT(i, _) => { + self.bump(); + i + } + token::INTERPOLATED(token::nt_ident(*)) => { + self.bug( + ~"ident interpolation not converted to real token" + ); + } + _ => { + self.fatal( + fmt!( + "expected ident, found `%s`", + token_to_str(self.reader, *self.token) + ) + ); + } } } @@ -155,9 +174,13 @@ pub impl Parser { fn expect_keyword(word: &~str) { self.require_keyword(word); if !self.eat_keyword(word) { - self.fatal(~"expected `" + *word + ~"`, found `" + - token_to_str(self.reader, *self.token) + - ~"`"); + self.fatal( + fmt!( + "expected `%s`, found `%s`", + *word, + token_to_str(self.reader, *self.token) + ) + ); } } @@ -177,7 +200,7 @@ pub impl Parser { fn check_strict_keywords_(w: &~str) { if self.is_strict_keyword(w) { - self.fatal(~"found `" + *w + ~"` in ident position"); + self.fatal(fmt!("found `%s` in ident position", *w)); } } @@ -197,7 +220,7 @@ pub impl Parser { fn check_reserved_keywords_(w: &~str) { if self.is_reserved_keyword(w) { - self.fatal(~"`" + *w + ~"` is a reserved keyword"); + self.fatal(fmt!("`%s` is a reserved keyword", *w)); } } @@ -207,9 +230,11 @@ pub impl Parser { if *self.token == token::GT { self.bump(); } else if *self.token == token::BINOP(token::SHR) { - self.replace_token(token::GT, - self.span.lo + BytePos(1u), - self.span.hi); + self.replace_token( + token::GT, + self.span.lo + BytePos(1u), + self.span.hi + ); } else { let mut s: ~str = ~"expected `"; s += token_to_str(self.reader, token::GT); @@ -222,8 +247,10 @@ pub impl Parser { // parse a sequence bracketed by '<' and '>', stopping // before the '>'. - fn parse_seq_to_before_gt(sep: Option, - f: fn(Parser) -> T) -> ~[T] { + fn parse_seq_to_before_gt( + sep: Option, + f: fn(Parser) -> T + ) -> ~[T] { let mut first = true; let mut v = ~[]; while *self.token != token::GT @@ -241,8 +268,10 @@ pub impl Parser { return v; } - fn parse_seq_to_gt(sep: Option, - f: fn(Parser) -> T) -> ~[T] { + fn parse_seq_to_gt( + sep: Option, + f: fn(Parser) -> T + ) -> ~[T] { let v = self.parse_seq_to_before_gt(sep, f); self.expect_gt(); @@ -250,8 +279,10 @@ pub impl Parser { } // parse a sequence bracketed by '<' and '>' - fn parse_seq_lt_gt(sep: Option, - f: fn(Parser) -> T) -> spanned<~[T]> { + fn parse_seq_lt_gt( + sep: Option, + f: fn(Parser) -> T + ) -> spanned<~[T]> { let lo = self.span.lo; self.expect(&token::LT); let result = self.parse_seq_to_before_gt::(sep, f); @@ -263,18 +294,24 @@ pub impl Parser { // parse a sequence, including the closing delimiter. 
The function // f must consume tokens until reaching the next separator or // closing bracket. - fn parse_seq_to_end(ket: token::Token, sep: SeqSep, - f: fn(Parser) -> T) -> ~[T] { + fn parse_seq_to_end( + ket: token::Token, + sep: SeqSep, + f: fn(Parser) -> T + ) -> ~[T] { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); - return val; + val } // parse a sequence, not including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. - fn parse_seq_to_before_end(ket: token::Token, sep: SeqSep, - f: fn(Parser) -> T) -> ~[T] { + fn parse_seq_to_before_end( + ket: token::Token, + sep: SeqSep, + f: fn(Parser) -> T + ) -> ~[T] { let mut first: bool = true; let mut v: ~[T] = ~[]; while *self.token != ket { @@ -288,31 +325,37 @@ pub impl Parser { if sep.trailing_sep_allowed && *self.token == ket { break; } v.push(f(self)); } - return v; + v } // parse a sequence, including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. - fn parse_unspanned_seq(+bra: token::Token, - +ket: token::Token, - sep: SeqSep, - f: fn(Parser) -> T) -> ~[T] { + fn parse_unspanned_seq( + +bra: token::Token, + +ket: token::Token, + sep: SeqSep, + f: fn(Parser) -> T + ) -> ~[T] { self.expect(&bra); - let result = self.parse_seq_to_before_end::(ket, sep, f); + let result = self.parse_seq_to_before_end(ket, sep, f); self.bump(); - return result; + result } // NB: Do not use this function unless you actually plan to place the // spanned list in the AST. - fn parse_seq(bra: token::Token, ket: token::Token, sep: SeqSep, - f: fn(Parser) -> T) -> spanned<~[T]> { + fn parse_seq( + +bra: token::Token, + +ket: token::Token, + sep: SeqSep, + f: fn(Parser) -> T + ) -> spanned<~[T]> { let lo = self.span.lo; self.expect(&bra); - let result = self.parse_seq_to_before_end::(ket, sep, f); + let result = self.parse_seq_to_before_end(ket, sep, f); let hi = self.span.hi; self.bump(); - return spanned(lo, hi, result); + spanned(lo, hi, result) } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index da7a0e0e4d329..36729dcb5ac26 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -422,16 +422,21 @@ pub impl Parser { self.expect(&token::GT); } let inputs = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + token::LPAREN, + token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_arg_general(false)); + |p| p.parse_arg_general(false) + ); let (ret_style, ret_ty) = self.parse_ret_ty(); ast::fn_decl { inputs: inputs, output: ret_ty, cf: ret_style } } fn parse_trait_methods() -> ~[trait_method] { - do self.parse_unspanned_seq(token::LBRACE, token::RBRACE, - seq_sep_none()) |p| { + do self.parse_unspanned_seq( + token::LBRACE, + token::RBRACE, + seq_sep_none() + ) |p| { let attrs = p.parse_outer_attributes(); let lo = p.span.lo; let is_static = p.parse_staticness(); @@ -619,9 +624,11 @@ pub impl Parser { ty_ptr(self.parse_mt()) } else if *self.token == token::LBRACE { let elems = self.parse_unspanned_seq( - token::LBRACE, token::RBRACE, + token::LBRACE, + token::RBRACE, seq_sep_trailing_allowed(token::COMMA), - |p| p.parse_ty_field()); + |p| p.parse_ty_field() + ); if vec::len(elems) == 0u { self.unexpected_last(token::RBRACE); } @@ -1175,10 +1182,11 @@ pub impl Parser { } else if *self.token == token::COMMA { // Vector with two or more elements. 
self.bump(); - let remaining_exprs = - self.parse_seq_to_end(token::RBRACKET, - seq_sep_trailing_allowed(token::COMMA), - |p| p.parse_expr()); + let remaining_exprs = self.parse_seq_to_end( + token::RBRACKET, + seq_sep_trailing_allowed(token::COMMA), + |p| p.parse_expr() + ); ex = expr_vec(~[first_expr] + remaining_exprs, mutbl); } else { // Vector with one element. @@ -1230,10 +1238,12 @@ pub impl Parser { }; let ket = token::flip_delimiter(&*self.token); - let tts = self.parse_unspanned_seq(*self.token, - ket, - seq_sep_none(), - |p| p.parse_token_tree()); + let tts = self.parse_unspanned_seq( + *self.token, + ket, + seq_sep_none(), + |p| p.parse_token_tree() + ); let hi = self.span.hi; return self.mk_mac_expr(lo, hi, mac_invoc_tt(pth, tts)); @@ -1310,8 +1320,10 @@ pub impl Parser { self.bump(); let tys = if self.eat(&token::MOD_SEP) { self.expect(&token::LT); - self.parse_seq_to_gt(Some(token::COMMA), - |p| p.parse_ty(false)) + self.parse_seq_to_gt( + Some(token::COMMA), + |p| p.parse_ty(false) + ) } else { ~[] }; @@ -1320,9 +1332,11 @@ pub impl Parser { match *self.token { token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + token::LPAREN, + token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_expr()); + |p| p.parse_expr() + ); hi = self.span.hi; let nd = expr_method_call(e, i, tys, es, NoSugar); @@ -1342,9 +1356,11 @@ pub impl Parser { // expr(...) token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + token::LPAREN, + token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_expr()); + |p| p.parse_expr() + ); hi = self.span.hi; let nd = expr_call(e, es, NoSugar); @@ -1373,7 +1389,7 @@ pub impl Parser { || *self.token == token::BINOP(token::PLUS) { let zerok = *self.token == token::BINOP(token::STAR); self.bump(); - return (None, zerok); + (None, zerok) } else { let sep = *self.token; self.bump(); @@ -1381,7 +1397,7 @@ pub impl Parser { || *self.token == token::BINOP(token::PLUS) { let zerok = *self.token == token::BINOP(token::STAR); self.bump(); - return (Some(sep), zerok); + (Some(sep), zerok) } else { self.fatal(~"expected `*` or `+`"); } @@ -1397,8 +1413,12 @@ pub impl Parser { match *p.token { token::RPAREN | token::RBRACE | token::RBRACKET => { - p.fatal(~"incorrect close delimiter: `" - + token_to_str(p.reader, *p.token) + ~"`"); + p.fatal( + fmt!( + "incorrect close delimiter: `%s`", + token_to_str(p.reader, *p.token) + ) + ); } /* we ought to allow different depths of unquotation */ token::DOLLAR if *p.quote_depth > 0u => { @@ -1406,9 +1426,12 @@ pub impl Parser { let sp = *p.span; if *p.token == token::LPAREN { - let seq = p.parse_seq(token::LPAREN, token::RPAREN, - seq_sep_none(), - |p| p.parse_token_tree()); + let seq = p.parse_seq( + token::LPAREN, + token::RPAREN, + seq_sep_none(), + |p| p.parse_token_tree() + ); let (s, z) = p.parse_sep_and_zerok(); tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z) } else { @@ -1429,23 +1452,29 @@ pub impl Parser { } match *self.token { - token::EOF => { + token::EOF => { self.fatal(~"file ended in the middle of a macro invocation"); - } - token::LPAREN | token::LBRACE | token::LBRACKET => { - // tjc: ?????? 
- let ket = token::flip_delimiter(&*self.token); - tt_delim(vec::append( - // the open delimiter: - ~[parse_any_tt_tok(self)], - vec::append( - self.parse_seq_to_before_end( - ket, seq_sep_none(), - |p| p.parse_token_tree()), - // the close delimiter: - ~[parse_any_tt_tok(self)]))) - } - _ => parse_non_delim_tt_tok(self) + } + token::LPAREN | token::LBRACE | token::LBRACKET => { + // tjc: ?????? + let ket = token::flip_delimiter(&*self.token); + tt_delim( + vec::append( + // the open delimiter: + ~[parse_any_tt_tok(self)], + vec::append( + self.parse_seq_to_before_end( + ket, + seq_sep_none(), + |p| p.parse_token_tree() + ), + // the close delimiter: + ~[parse_any_tt_tok(self)] + ) + ) + ) + } + _ => parse_non_delim_tt_tok(self) } } @@ -1462,13 +1491,16 @@ pub impl Parser { // the interpolation of matchers maybe_whole!(self, nt_matchers); let name_idx = @mut 0u; - return match *self.token { - token::LBRACE | token::LPAREN | token::LBRACKET => { - self.parse_matcher_subseq(name_idx, *self.token, - // tjc: not sure why we need a copy - token::flip_delimiter(&*self.token)) - } - _ => self.fatal(~"expected open delimiter") + match *self.token { + token::LBRACE | token::LPAREN | token::LBRACKET => { + self.parse_matcher_subseq( + name_idx, + *self.token, + // tjc: not sure why we need a copy + token::flip_delimiter(&*self.token) + ) + } + _ => self.fatal(~"expected open delimiter") } } @@ -1476,8 +1508,11 @@ pub impl Parser { // This goofy function is necessary to correctly match parens in matchers. // Otherwise, `$( ( )` would be a valid matcher, and `$( () )` would be // invalid. It's similar to common::parse_seq. - fn parse_matcher_subseq(name_idx: @mut uint, bra: token::Token, - ket: token::Token) -> ~[matcher] { + fn parse_matcher_subseq( + name_idx: @mut uint, + bra: token::Token, + ket: token::Token + ) -> ~[matcher] { let mut ret_val = ~[]; let mut lparens = 0u; @@ -1501,9 +1536,11 @@ pub impl Parser { self.bump(); if *self.token == token::LPAREN { let name_idx_lo = *name_idx; - let ms = self.parse_matcher_subseq(name_idx, - token::LPAREN, - token::RPAREN); + let ms = self.parse_matcher_subseq( + name_idx, + token::LPAREN, + token::RPAREN + ); if ms.len() == 0u { self.fatal(~"repetition body must be nonempty"); } @@ -2276,10 +2313,13 @@ pub impl Parser { } _ => { args = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, - seq_sep_trailing_disallowed - (token::COMMA), - |p| p.parse_pat(refutable)); + token::LPAREN, + token::RPAREN, + seq_sep_trailing_disallowed( + token::COMMA + ), + |p| p.parse_pat(refutable) + ); } }, _ => () @@ -2423,8 +2463,11 @@ pub impl Parser { }; let tts = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, seq_sep_none(), - |p| p.parse_token_tree()); + token::LPAREN, + token::RPAREN, + seq_sep_none(), + |p| p.parse_token_tree() + ); let hi = self.span.hi; if id == token::special_idents::invalid { @@ -2720,7 +2763,8 @@ pub impl Parser { let _lifetimes = self.parse_lifetimes(); self.parse_seq_to_gt( Some(token::COMMA), - |p| p.parse_ty_param()) + |p| p.parse_ty_param() + ) } else { ~[] } } @@ -2729,8 +2773,11 @@ pub impl Parser { { let args_or_capture_items: ~[arg_or_capture_item] = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, - seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn); + token::LPAREN, + token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + parse_arg_fn + ); let inputs = either::lefts(args_or_capture_items); @@ -2810,10 +2857,11 @@ pub impl Parser { token::COMMA => { self.bump(); let sep = 
seq_sep_trailing_disallowed(token::COMMA); - args_or_capture_items = - self.parse_seq_to_before_end(token::RPAREN, - sep, - parse_arg_fn); + args_or_capture_items = self.parse_seq_to_before_end( + token::RPAREN, + sep, + parse_arg_fn + ); } token::RPAREN => { args_or_capture_items = ~[]; @@ -2826,10 +2874,11 @@ pub impl Parser { } } else { let sep = seq_sep_trailing_disallowed(token::COMMA); - args_or_capture_items = - self.parse_seq_to_before_end(token::RPAREN, - sep, - parse_arg_fn); + args_or_capture_items = self.parse_seq_to_before_end( + token::RPAREN, + sep, + parse_arg_fn + ); } self.expect(&token::RPAREN); @@ -2854,9 +2903,11 @@ pub impl Parser { ~[] } else { self.parse_unspanned_seq( - token::BINOP(token::OR), token::BINOP(token::OR), + token::BINOP(token::OR), + token::BINOP(token::OR), seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_fn_block_arg()) + |p| p.parse_fn_block_arg() + ) } }; let output = if self.eat(&token::RARROW) { @@ -3054,8 +3105,10 @@ pub impl Parser { fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] { self.parse_seq_to_before_end( - ket, seq_sep_none(), - |p| p.parse_trait_ref()) + ket, + seq_sep_none(), + |p| p.parse_trait_ref() + ) } fn parse_item_struct() -> item_info { @@ -3102,9 +3155,11 @@ pub impl Parser { } else if *self.token == token::LPAREN { // It's a tuple-like struct. is_tuple_like = true; - fields = do self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep_trailing_allowed - (token::COMMA)) |p| { + fields = do self.parse_unspanned_seq( + token::LPAREN, + token::RPAREN, + seq_sep_trailing_allowed(token::COMMA) + ) |p| { let lo = p.span.lo; let struct_field_ = ast::struct_field_ { kind: unnamed_field, @@ -3667,9 +3722,11 @@ pub impl Parser { } else if *self.token == token::LPAREN { all_nullary = false; let arg_tys = self.parse_unspanned_seq( - token::LPAREN, token::RPAREN, + token::LPAREN, + token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), - |p| p.parse_ty(false)); + |p| p.parse_ty(false) + ); for arg_tys.each |ty| { args.push(ast::variant_arg { ty: *ty, @@ -3915,13 +3972,16 @@ pub impl Parser { }; // eat a matched-delimiter token tree: let tts = match *self.token { - token::LPAREN | token::LBRACE => { - let ket = token::flip_delimiter(&*self.token); - self.parse_unspanned_seq(*self.token, ket, - seq_sep_none(), - |p| p.parse_token_tree()) - } - _ => self.fatal(~"expected open delimiter") + token::LPAREN | token::LBRACE => { + let ket = token::flip_delimiter(&*self.token); + self.parse_unspanned_seq( + *self.token, + ket, + seq_sep_none(), + |p| p.parse_token_tree() + ) + } + _ => self.fatal(~"expected open delimiter") }; // single-variant-enum... 
: let m = ast::mac_invoc_tt(pth, tts); @@ -4007,9 +4067,11 @@ pub impl Parser { // foo::bar::{a,b,c} token::LBRACE => { let idents = self.parse_unspanned_seq( - token::LBRACE, token::RBRACE, + token::LBRACE, + token::RBRACE, seq_sep_trailing_allowed(token::COMMA), - |p| p.parse_path_list_ident()); + |p| p.parse_path_list_ident() + ); let path = @ast::path { span: mk_sp(lo, self.span.hi), global: false, idents: path, From 752befe2a6401108f27ff0141bdd73baac44c41c Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 10:52:06 -0800 Subject: [PATCH 13/30] libsyntax: change token_is_{word,keyword} to take &Token --- src/libsyntax/parse/common.rs | 12 ++++++------ src/libsyntax/parse/parser.rs | 32 ++++++++++++++++---------------- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index c7f029e9733db..91abd804fb3ed 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -136,24 +136,24 @@ pub impl Parser { } } - fn token_is_word(word: &~str, tok: token::Token) -> bool { - match tok { + fn token_is_word(word: &~str, tok: &token::Token) -> bool { + match *tok { token::IDENT(sid, false) => { *self.id_to_str(sid) == *word } _ => { false } } } - fn token_is_keyword(word: &~str, ++tok: token::Token) -> bool { + fn token_is_keyword(word: &~str, tok: &token::Token) -> bool { self.require_keyword(word); self.token_is_word(word, tok) } fn is_keyword(word: &~str) -> bool { - self.token_is_keyword(word, *self.token) + self.token_is_keyword(word, &*self.token) } - fn is_any_keyword(tok: token::Token) -> bool { - match tok { + fn is_any_keyword(tok: &token::Token) -> bool { + match *tok { token::IDENT(sid, false) => { self.keywords.contains_key(self.id_to_str(sid)) } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 36729dcb5ac26..7a0f248cd3df0 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -309,7 +309,7 @@ pub impl Parser { pure fn id_to_str(id: ident) -> @~str { self.sess.interner.get(id) } - fn token_is_closure_keyword(+tok: token::Token) -> bool { + fn token_is_closure_keyword(tok: &token::Token) -> bool { self.token_is_keyword(&~"pure", tok) || self.token_is_keyword(&~"unsafe", tok) || self.token_is_keyword(&~"once", tok) || @@ -649,7 +649,7 @@ pub impl Parser { self.parse_borrowed_pointee() } else if self.eat_keyword(&~"extern") { self.parse_ty_bare_fn() - } else if self.token_is_closure_keyword(*self.token) { + } else if self.token_is_closure_keyword(&*self.token) { self.parse_ty_closure(None, None) } else if *self.token == token::MOD_SEP || is_ident_or_path(*self.token) { @@ -676,12 +676,12 @@ pub impl Parser { token::IDENT(rname, _) => { if self.look_ahead(1u) == token::BINOP(token::SLASH) && - self.token_is_closure_keyword(self.look_ahead(2u)) + self.token_is_closure_keyword(&self.look_ahead(2u)) { self.bump(); self.bump(); return self.parse_ty_closure(Some(sigil), Some(rname)); - } else if self.token_is_closure_keyword(*self.token) { + } else if self.token_is_closure_keyword(&*self.token) { return self.parse_ty_closure(Some(sigil), None); } } @@ -716,7 +716,7 @@ pub impl Parser { _ => { None } }; - if self.token_is_closure_keyword(*self.token) { + if self.token_is_closure_keyword(&*self.token) { return self.parse_ty_closure(Some(BorrowedSigil), rname); } @@ -1912,7 +1912,7 @@ pub impl Parser { // labeled loop headers look like 'loop foo: {' let is_labeled_loop_header = is_ident(*self.token) - && 
!self.is_any_keyword(*self.token) + && !self.is_any_keyword(&*self.token) && self.look_ahead(1) == token::COLON; if is_loop_header || is_labeled_loop_header { @@ -1946,7 +1946,7 @@ pub impl Parser { fn looking_at_record_literal() -> bool { let lookahead = self.look_ahead(1); *self.token == token::LBRACE && - (self.token_is_keyword(&~"mut", lookahead) || + (self.token_is_keyword(&~"mut", &lookahead) || (is_plain_ident(lookahead) && self.look_ahead(2) == token::COLON)) } @@ -2446,7 +2446,7 @@ pub impl Parser { let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); } else if is_ident(*self.token) - && !self.is_any_keyword(*self.token) + && !self.is_any_keyword(&*self.token) && self.look_ahead(1) == token::NOT { check_expected_item(self, first_item_attrs); @@ -2812,10 +2812,10 @@ pub impl Parser { fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_, p: Parser) -> ast::self_ty_ { // We need to make sure it isn't a mode or a type - if p.token_is_keyword(&~"self", p.look_ahead(1)) || - ((p.token_is_keyword(&~"const", p.look_ahead(1)) || - p.token_is_keyword(&~"mut", p.look_ahead(1))) && - p.token_is_keyword(&~"self", p.look_ahead(2))) { + if p.token_is_keyword(&~"self", &p.look_ahead(1)) || + ((p.token_is_keyword(&~"const", &p.look_ahead(1)) || + p.token_is_keyword(&~"mut", &p.look_ahead(1))) && + p.token_is_keyword(&~"self", &p.look_ahead(2))) { p.bump(); let mutability = p.parse_mutability(); @@ -3948,7 +3948,7 @@ pub impl Parser { vis: visibility, span: mk_sp(lo, self.last_span.hi) }); - } else if macros_allowed && !self.is_any_keyword(*self.token) + } else if macros_allowed && !self.is_any_keyword(&*self.token) && self.look_ahead(1) == token::NOT && (is_plain_ident(self.look_ahead(2)) || self.look_ahead(2) == token::LPAREN @@ -4127,9 +4127,9 @@ pub impl Parser { tok = self.look_ahead(1); next_tok = self.look_ahead(2); }; - self.token_is_keyword(&~"use", tok) - || (self.token_is_keyword(&~"extern", tok) && - self.token_is_keyword(&~"mod", next_tok)) + self.token_is_keyword(&~"use", &tok) + || (self.token_is_keyword(&~"extern", &tok) && + self.token_is_keyword(&~"mod", &next_tok)) } // parse a view item. 
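Before the next patch, one detail of the `token_is_keyword`/`is_any_keyword` change above is worth spelling out: because the predicates now take `&Token`, call sites that previously passed `self.look_ahead(1)` by value now borrow the temporary, as in `p.token_is_keyword(&~"self", &p.look_ahead(1))` from the hunks above. A small sketch of that call-site shape, again in present-day Rust with hypothetical stand-in types:

    // Illustration only; `Token`, `Parser`, and the token stream are stand-ins.
    #[derive(Clone)]
    enum Token { Ident(&'static str), Lt, Eof }

    struct Parser { tokens: Vec<Token>, pos: usize }

    impl Parser {
        // Mirrors the 2013 parser in shape: look_ahead hands back an owned token.
        fn look_ahead(&self, n: usize) -> Token {
            self.tokens.get(self.pos + n).cloned().unwrap_or(Token::Eof)
        }

        // After the patch the predicate borrows the token it inspects.
        fn token_is_keyword(&self, word: &str, tok: &Token) -> bool {
            matches!(tok, Token::Ident(s) if *s == word)
        }
    }

    fn main() {
        let p = Parser {
            tokens: vec![Token::Ident("impl"), Token::Ident("self"), Token::Lt],
            pos: 0,
        };
        // The owned temporary from look_ahead is borrowed just for the call.
        assert!(p.token_is_keyword("self", &p.look_ahead(1)));
        assert!(!p.token_is_keyword("self", &p.look_ahead(2)));
    }
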
From bff22cf1665e98a1c3feb60e1c23fc30a4120934 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 18:32:02 -0800 Subject: [PATCH 14/30] libsyntax: add some explicit copies --- src/libsyntax/ast_map.rs | 15 +++-- src/libsyntax/ast_util.rs | 4 +- src/libsyntax/ext/pipes/parse_proto.rs | 56 ++++++++++++------- src/libsyntax/ext/tt/macro_parser.rs | 2 +- src/libsyntax/fold.rs | 2 +- src/libsyntax/parse/common.rs | 4 +- src/libsyntax/parse/obsolete.rs | 2 +- src/libsyntax/parse/parser.rs | 77 ++++++++++++++------------ 8 files changed, 94 insertions(+), 68 deletions(-) diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index b56dfeffd7a77..48fe0ca5b2d7f 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -60,8 +60,8 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner) -> ~str { let strs = do p.map |e| { match *e { - path_mod(s) => *itr.get(s), - path_name(s) => *itr.get(s) + path_mod(s) => copy *itr.get(s), + path_name(s) => copy *itr.get(s) } }; str::connect(strs, sep) @@ -70,7 +70,7 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner) pub fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str { if vec::is_empty(p) { //FIXME /* FIXME (#2543) */ copy *i - *itr.get(i) + copy *itr.get(i) } else { fmt!("%s::%s", path_to_str(p, itr), *itr.get(i)) } @@ -82,8 +82,8 @@ pub fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str { pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str { match pe { - path_mod(s) => *itr.get(s), - path_name(s) => *itr.get(s) + path_mod(s) => copy *itr.get(s), + path_name(s) => copy *itr.get(s) } } @@ -310,7 +310,10 @@ pub fn map_item(i: @item, &&cx: @mut Ctx, v: visit::vt<@mut Ctx>) { for methods.each |tm| { let id = ast_util::trait_method_to_ty_method(tm).id; let d_id = ast_util::local_def(i.id); - cx.map.insert(id, node_trait_method(@*tm, d_id, item_path)); + cx.map.insert( + id, + node_trait_method(@copy *tm, d_id, item_path) + ); } } _ => () diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 7bf08aaaf76fc..59f25024c82b2 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -27,7 +27,7 @@ use core::vec; pub pure fn path_name_i(idents: &[ident], intr: @token::ident_interner) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") - str::connect(idents.map(|i| *intr.get(*i)), ~"::") + str::connect(idents.map(|i| copy *intr.get(*i)), ~"::") } @@ -283,7 +283,7 @@ pub fn split_trait_methods(trait_methods: &[trait_method]) let mut reqd = ~[], provd = ~[]; for trait_methods.each |trt_method| { match *trt_method { - required(ref tm) => reqd.push((*tm)), + required(ref tm) => reqd.push(copy *tm), provided(m) => provd.push(m) } }; diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index 07db67d3173b2..fae5b1b49af2d 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -27,10 +27,14 @@ pub impl proto_parser for parser::Parser { fn parse_proto(&self, id: ~str) -> protocol { let proto = protocol(id, *self.span); - self.parse_seq_to_before_end(token::EOF, SeqSep { - sep: None, - trailing_sep_allowed: false - }, |self| self.parse_state(proto)); + self.parse_seq_to_before_end( + token::EOF, + SeqSep { + sep: None, + trailing_sep_allowed: false, + }, + |self| self.parse_state(proto) + ); return proto; } @@ -40,9 +44,9 @@ pub impl proto_parser for parser::Parser { let name = 
*self.interner.get(id); self.expect(&token::COLON); - let dir = match *self.token { - token::IDENT(n, _) => self.interner.get(n), - _ => fail!() + let dir = match copy *self.token { + token::IDENT(n, _) => self.interner.get(n), + _ => fail!() }; self.bump(); let dir = match dir { @@ -61,21 +65,29 @@ pub impl proto_parser for parser::Parser { // parse the messages self.parse_unspanned_seq( - token::LBRACE, token::RBRACE, SeqSep { + token::LBRACE, + token::RBRACE, + SeqSep { sep: Some(token::COMMA), - trailing_sep_allowed: true - }, |self| self.parse_message(state)); + trailing_sep_allowed: true, + }, + |self| self.parse_message(state) + ); } fn parse_message(&self, state: state) { let mname = *self.interner.get(self.parse_ident()); let args = if *self.token == token::LPAREN { - self.parse_unspanned_seq(token::LPAREN, - token::RPAREN, SeqSep { - sep: Some(token::COMMA), - trailing_sep_allowed: true - }, |p| p.parse_ty(false)) + self.parse_unspanned_seq( + token::LPAREN, + token::RPAREN, + SeqSep { + sep: Some(token::COMMA), + trailing_sep_allowed: true, + }, + |p| p.parse_ty(false) + ) } else { ~[] }; @@ -85,11 +97,15 @@ pub impl proto_parser for parser::Parser { token::IDENT(_, _) => { let name = *self.interner.get(self.parse_ident()); let ntys = if *self.token == token::LT { - self.parse_unspanned_seq(token::LT, - token::GT, SeqSep { - sep: Some(token::COMMA), - trailing_sep_allowed: true - }, |p| p.parse_ty(false)) + self.parse_unspanned_seq( + token::LT, + token::GT, + SeqSep { + sep: Some(token::COMMA), + trailing_sep_allowed: true, + }, + |p| p.parse_ty(false) + ) } else { ~[] }; Some(next_state {state: name, tys: ntys}) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 890420edf6d68..713bf9afcd0f0 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -424,7 +424,7 @@ pub fn parse_nt(p: Parser, name: ~str) -> nonterminal { ~"ident" => match *p.token { token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) } _ => p.fatal(~"expected ident, found " - + token::to_str(p.reader.interner(), *p.token)) + + token::to_str(p.reader.interner(), copy *p.token)) }, ~"path" => token::nt_path(p.parse_path_with_tps(false)), ~"tt" => { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index e74376afb0831..d67596e100fa0 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -280,7 +280,7 @@ pub fn noop_fold_item_underscore(i: &item_, fld: ast_fold) -> item_ { } item_mac(ref m) => { // FIXME #2888: we might actually want to do something here. 
- item_mac((*m)) + item_mac(copy *m) } } } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 91abd804fb3ed..785270625157a 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -27,13 +27,13 @@ pub struct SeqSep { trailing_sep_allowed: bool } -pub fn seq_sep_trailing_disallowed(t: token::Token) -> SeqSep { +pub fn seq_sep_trailing_disallowed(+t: token::Token) -> SeqSep { SeqSep { sep: Some(t), trailing_sep_allowed: false, } } -pub fn seq_sep_trailing_allowed(t: token::Token) -> SeqSep { +pub fn seq_sep_trailing_allowed(+t: token::Token) -> SeqSep { SeqSep { sep: Some(t), trailing_sep_allowed: true, diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 02c2fb404c207..96ed81e476e7f 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -198,7 +198,7 @@ pub impl Parser { fn try_parse_obsolete_priv_section() -> bool { if self.is_keyword(&~"priv") && self.look_ahead(1) == token::LBRACE { - self.obsolete(*self.span, ObsoletePrivSection); + self.obsolete(copy *self.span, ObsoletePrivSection); self.eat_keyword(&~"priv"); self.bump(); while *self.token != token::RBRACE { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7a0f248cd3df0..9fe53fe50e2d2 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -195,11 +195,10 @@ struct ParsedItemsAndViewItems { /* ident is handled by common.rs */ pub fn Parser(sess: @mut ParseSess, - cfg: ast::crate_cfg, + +cfg: ast::crate_cfg, +rdr: reader) -> Parser { - let tok0 = rdr.next_token(); - let span0 = tok0.sp; + let tok0 = copy rdr.next_token(); let interner = rdr.interner(); Parser { @@ -207,15 +206,15 @@ pub fn Parser(sess: @mut ParseSess, interner: interner, sess: sess, cfg: cfg, - token: @mut tok0.tok, - span: @mut span0, - last_span: @mut span0, - buffer: @mut [TokenAndSpan {tok: tok0.tok, sp: span0}, ..4], + token: @mut copy tok0.tok, + span: @mut copy tok0.sp, + last_span: @mut copy tok0.sp, + buffer: @mut [copy tok0, .. 
4], buffer_start: @mut 0, buffer_end: @mut 0, - tokens_consumed: @mut 0u, + tokens_consumed: @mut 0, restriction: @mut UNRESTRICTED, - quote_depth: @mut 0u, + quote_depth: @mut 0, keywords: token::keyword_table(), strict_keywords: token::strict_keyword_table(), reserved_keywords: token::reserved_keyword_table(), @@ -253,20 +252,20 @@ pub struct Parser { pub impl Parser { // advance the parser by one token fn bump() { - *self.last_span = *self.span; + *self.last_span = copy *self.span; let next = if *self.buffer_start == *self.buffer_end { self.reader.next_token() } else { - let next = self.buffer[*self.buffer_start]; + let next = copy self.buffer[*self.buffer_start]; *self.buffer_start = (*self.buffer_start + 1) & 3; next }; - *self.token = next.tok; - *self.span = next.sp; + *self.token = copy next.tok; + *self.span = copy next.sp; *self.tokens_consumed += 1u; } // EFFECT: replace the current token and span with the given one - fn replace_token(next: token::Token, +lo: BytePos, +hi: BytePos) { + fn replace_token(+next: token::Token, +lo: BytePos, +hi: BytePos) { *self.token = next; *self.span = mk_sp(lo, hi); } @@ -461,7 +460,7 @@ pub impl Parser { let hi = p.last_span.hi; debug!("parse_trait_methods(): trait method signature ends in \ `%s`", - token_to_str(p.reader, *p.token)); + token_to_str(p.reader, copy *p.token)); match *p.token { token::SEMI => { p.bump(); @@ -499,8 +498,13 @@ pub impl Parser { }) } - _ => { p.fatal(~"expected `;` or `}` but found `" + - token_to_str(p.reader, *p.token) + ~"`"); + _ => { + p.fatal( + fmt!( + "expected `;` or `}` but found `%s`", + token_to_str(p.reader, copy *p.token) + ) + ); } } } @@ -649,7 +653,7 @@ pub impl Parser { self.parse_borrowed_pointee() } else if self.eat_keyword(&~"extern") { self.parse_ty_bare_fn() - } else if self.token_is_closure_keyword(&*self.token) { + } else if self.token_is_closure_keyword(&copy *self.token) { self.parse_ty_closure(None, None) } else if *self.token == token::MOD_SEP || is_ident_or_path(*self.token) { @@ -681,7 +685,7 @@ pub impl Parser { self.bump(); self.bump(); return self.parse_ty_closure(Some(sigil), Some(rname)); - } else if self.token_is_closure_keyword(&*self.token) { + } else if self.token_is_closure_keyword(&copy *self.token) { return self.parse_ty_closure(Some(sigil), None); } } @@ -716,7 +720,7 @@ pub impl Parser { _ => { None } }; - if self.token_is_closure_keyword(&*self.token) { + if self.token_is_closure_keyword(&copy *self.token) { return self.parse_ty_closure(Some(BorrowedSigil), rname); } @@ -841,9 +845,12 @@ pub impl Parser { } _ => { self.fatal( - fmt!("expected integral vector length \ but found `%s`", - token_to_str(self.reader, *self.token))); + fmt!( + "expected integral vector length \ but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } } } else { @@ -873,7 +880,7 @@ pub impl Parser { lit_bool(false) } else { // XXX: This is a really bad copy!
- let tok = *self.token; + let tok = copy *self.token; self.bump(); self.lit_from_token(tok) }; @@ -1063,7 +1070,7 @@ pub impl Parser { } } - fn mk_mac_expr(+lo: BytePos, +hi: BytePos, m: mac_) -> @expr { + fn mk_mac_expr(+lo: BytePos, +hi: BytePos, +m: mac_) -> @expr { @expr { id: self.get_id(), callee_id: self.get_id(), @@ -1391,7 +1398,7 @@ pub impl Parser { self.bump(); (None, zerok) } else { - let sep = *self.token; + let sep = copy *self.token; self.bump(); if *self.token == token::BINOP(token::STAR) || *self.token == token::BINOP(token::PLUS) { @@ -1416,7 +1423,7 @@ pub impl Parser { p.fatal( fmt!( "incorrect close delimiter: `%s`", - token_to_str(p.reader, *p.token) + token_to_str(p.reader, copy *p.token) ) ); } @@ -1912,7 +1919,7 @@ pub impl Parser { // labeled loop headers look like 'loop foo: {' let is_labeled_loop_header = is_ident(*self.token) - && !self.is_any_keyword(&*self.token) + && !self.is_any_keyword(&copy *self.token) && self.look_ahead(1) == token::COLON; if is_loop_header || is_labeled_loop_header { @@ -2143,7 +2150,7 @@ pub impl Parser { let lo = self.span.lo; let mut hi = self.span.hi; let mut pat; - match *self.token { + match copy *self.token { token::UNDERSCORE => { self.bump(); pat = pat_wild; } token::AT => { self.bump(); @@ -2446,7 +2453,7 @@ pub impl Parser { let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); } else if is_ident(*self.token) - && !self.is_any_keyword(&*self.token) + && !self.is_any_keyword(&copy *self.token) && self.look_ahead(1) == token::NOT { check_expected_item(self, first_item_attrs); @@ -2540,7 +2547,7 @@ pub impl Parser { let lo = self.span.lo; if self.eat_keyword(&~"unsafe") { - self.obsolete(*self.span, ObsoleteUnsafeBlock); + self.obsolete(copy *self.span, ObsoleteUnsafeBlock); } self.expect(&token::LBRACE); let (inner, next) = @@ -3049,7 +3056,7 @@ pub impl Parser { ty = self.parse_ty(false); opt_trait_ref } else if self.eat(&token::COLON) { - self.obsolete(*self.span, ObsoleteImplSyntax); + self.obsolete(copy *self.span, ObsoleteImplSyntax); Some(self.parse_trait_ref()) } else { None @@ -3116,7 +3123,7 @@ pub impl Parser { self.parse_region_param(); let ty_params = self.parse_ty_params(); if self.eat(&token::COLON) { - self.obsolete(*self.span, ObsoleteClassTraits); + self.obsolete(copy *self.span, ObsoleteClassTraits); let _ = self.parse_trait_ref_list(token::LBRACE); } @@ -3948,7 +3955,7 @@ pub impl Parser { vis: visibility, span: mk_sp(lo, self.last_span.hi) }); - } else if macros_allowed && !self.is_any_keyword(&*self.token) + } else if macros_allowed && !self.is_any_keyword(&copy *self.token) && self.look_ahead(1) == token::NOT && (is_plain_ident(self.look_ahead(2)) || self.look_ahead(2) == token::LPAREN @@ -4121,7 +4128,7 @@ pub impl Parser { fn is_view_item() -> bool { let tok, next_tok; if !self.is_keyword(&~"pub") && !self.is_keyword(&~"priv") { - tok = *self.token; + tok = copy *self.token; next_tok = self.look_ahead(1); } else { tok = self.look_ahead(1); From 3180d22dde253c86ff42eb8a3e936a7856477ca4 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 18:51:04 -0800 Subject: [PATCH 15/30] libsyntax: change parse_trait_ref_list to take a &Token --- src/libsyntax/parse/parser.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 9fe53fe50e2d2..ef274cb94abd7 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -3001,7 +3001,7 @@ pub impl Parser { let
traits; if *self.token == token::COLON { self.bump(); - traits = self.parse_trait_ref_list(token::LBRACE); + traits = self.parse_trait_ref_list(&token::LBRACE); } else { traits = ~[]; } @@ -3110,9 +3110,9 @@ pub impl Parser { } } - fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] { + fn parse_trait_ref_list(ket: &token::Token) -> ~[@trait_ref] { self.parse_seq_to_before_end( - ket, + *ket, seq_sep_none(), |p| p.parse_trait_ref() ) @@ -3124,7 +3124,7 @@ pub impl Parser { let ty_params = self.parse_ty_params(); if self.eat(&token::COLON) { self.obsolete(copy *self.span, ObsoleteClassTraits); - let _ = self.parse_trait_ref_list(token::LBRACE); + let _ = self.parse_trait_ref_list(&token::LBRACE); } let mut fields: ~[@struct_field]; From cf6e21a17f04ddc766633f1f22144f7e6c59008c Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 18:55:24 -0800 Subject: [PATCH 16/30] libsyntax: change attr::parse_seq_* to take &Token --- src/libsyntax/ext/pipes/parse_proto.rs | 14 +++--- src/libsyntax/parse/attr.rs | 4 +- src/libsyntax/parse/common.rs | 22 ++++---- src/libsyntax/parse/parser.rs | 70 +++++++++++++------------- 4 files changed, 55 insertions(+), 55 deletions(-) diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index fae5b1b49af2d..a5d2a1783d63f 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -28,7 +28,7 @@ pub impl proto_parser for parser::Parser { let proto = protocol(id, *self.span); self.parse_seq_to_before_end( - token::EOF, + &token::EOF, SeqSep { sep: None, trailing_sep_allowed: false, @@ -65,8 +65,8 @@ pub impl proto_parser for parser::Parser { // parse the messages self.parse_unspanned_seq( - token::LBRACE, - token::RBRACE, + &token::LBRACE, + &token::RBRACE, SeqSep { sep: Some(token::COMMA), trailing_sep_allowed: true, @@ -80,8 +80,8 @@ pub impl proto_parser for parser::Parser { let args = if *self.token == token::LPAREN { self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, SeqSep { sep: Some(token::COMMA), trailing_sep_allowed: true, @@ -98,8 +98,8 @@ pub impl proto_parser for parser::Parser { let name = *self.interner.get(self.parse_ident()); let ntys = if *self.token == token::LT { self.parse_unspanned_seq( - token::LT, - token::GT, + &token::LT, + &token::GT, SeqSep { sep: Some(token::COMMA), trailing_sep_allowed: true, diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index f7b115912da23..bad6c76c36a2b 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -152,8 +152,8 @@ impl parser_attr for Parser { fn parse_meta_seq() -> ~[@ast::meta_item] { self.parse_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_meta_item() ).node diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 785270625157a..93d3b8b336416 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -295,7 +295,7 @@ pub impl Parser { // f must consume tokens until reaching the next separator or // closing bracket. fn parse_seq_to_end( - ket: token::Token, + ket: &token::Token, sep: SeqSep, f: fn(Parser) -> T ) -> ~[T] { @@ -308,13 +308,13 @@ pub impl Parser { // f must consume tokens until reaching the next separator or // closing bracket. 
fn parse_seq_to_before_end( - ket: token::Token, + ket: &token::Token, sep: SeqSep, f: fn(Parser) -> T ) -> ~[T] { let mut first: bool = true; let mut v: ~[T] = ~[]; - while *self.token != ket { + while *self.token != *ket { match sep.sep { Some(ref t) => { if first { first = false; } @@ -322,22 +322,22 @@ pub impl Parser { } _ => () } - if sep.trailing_sep_allowed && *self.token == ket { break; } + if sep.trailing_sep_allowed && *self.token == *ket { break; } v.push(f(self)); } - v + return v; } // parse a sequence, including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. fn parse_unspanned_seq( - +bra: token::Token, - +ket: token::Token, + bra: &token::Token, + ket: &token::Token, sep: SeqSep, f: fn(Parser) -> T ) -> ~[T] { - self.expect(&bra); + self.expect(bra); let result = self.parse_seq_to_before_end(ket, sep, f); self.bump(); result @@ -346,13 +346,13 @@ pub impl Parser { // NB: Do not use this function unless you actually plan to place the // spanned list in the AST. fn parse_seq( - +bra: token::Token, - +ket: token::Token, + bra: &token::Token, + ket: &token::Token, sep: SeqSep, f: fn(Parser) -> T ) -> spanned<~[T]> { let lo = self.span.lo; - self.expect(&bra); + self.expect(bra); let result = self.parse_seq_to_before_end(ket, sep, f); let hi = self.span.hi; self.bump(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index ef274cb94abd7..1e6473c3cb177 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -421,8 +421,8 @@ pub impl Parser { self.expect(&token::GT); } let inputs = self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_arg_general(false) ); @@ -432,8 +432,8 @@ pub impl Parser { fn parse_trait_methods() -> ~[trait_method] { do self.parse_unspanned_seq( - token::LBRACE, - token::RBRACE, + &token::LBRACE, + &token::RBRACE, seq_sep_none() ) |p| { let attrs = p.parse_outer_attributes(); @@ -628,8 +628,8 @@ pub impl Parser { ty_ptr(self.parse_mt()) } else if *self.token == token::LBRACE { let elems = self.parse_unspanned_seq( - token::LBRACE, - token::RBRACE, + &token::LBRACE, + &token::RBRACE, seq_sep_trailing_allowed(token::COMMA), |p| p.parse_ty_field() ); @@ -1190,7 +1190,7 @@ pub impl Parser { // Vector with two or more elements. self.bump(); let remaining_exprs = self.parse_seq_to_end( - token::RBRACKET, + &token::RBRACKET, seq_sep_trailing_allowed(token::COMMA), |p| p.parse_expr() ); @@ -1246,8 +1246,8 @@ pub impl Parser { let ket = token::flip_delimiter(&*self.token); let tts = self.parse_unspanned_seq( - *self.token, - ket, + © *self.token, + &ket, seq_sep_none(), |p| p.parse_token_tree() ); @@ -1339,8 +1339,8 @@ pub impl Parser { match *self.token { token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_expr() ); @@ -1363,8 +1363,8 @@ pub impl Parser { // expr(...) 
token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_expr() ); @@ -1434,8 +1434,8 @@ pub impl Parser { if *p.token == token::LPAREN { let seq = p.parse_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_none(), |p| p.parse_token_tree() ); @@ -1471,7 +1471,7 @@ pub impl Parser { ~[parse_any_tt_tok(self)], vec::append( self.parse_seq_to_before_end( - ket, + &ket, seq_sep_none(), |p| p.parse_token_tree() ), @@ -2320,8 +2320,8 @@ pub impl Parser { } _ => { args = self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed( token::COMMA ), @@ -2470,8 +2470,8 @@ pub impl Parser { }; let tts = self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_none(), |p| p.parse_token_tree() ); @@ -2780,8 +2780,8 @@ pub impl Parser { { let args_or_capture_items: ~[arg_or_capture_item] = self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn ); @@ -2865,7 +2865,7 @@ pub impl Parser { self.bump(); let sep = seq_sep_trailing_disallowed(token::COMMA); args_or_capture_items = self.parse_seq_to_before_end( - token::RPAREN, + &token::RPAREN, sep, parse_arg_fn ); @@ -2882,7 +2882,7 @@ pub impl Parser { } else { let sep = seq_sep_trailing_disallowed(token::COMMA); args_or_capture_items = self.parse_seq_to_before_end( - token::RPAREN, + &token::RPAREN, sep, parse_arg_fn ); @@ -2910,8 +2910,8 @@ pub impl Parser { ~[] } else { self.parse_unspanned_seq( - token::BINOP(token::OR), - token::BINOP(token::OR), + &token::BINOP(token::OR), + &token::BINOP(token::OR), seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_fn_block_arg() ) @@ -3112,7 +3112,7 @@ pub impl Parser { fn parse_trait_ref_list(ket: &token::Token) -> ~[@trait_ref] { self.parse_seq_to_before_end( - *ket, + ket, seq_sep_none(), |p| p.parse_trait_ref() ) @@ -3163,8 +3163,8 @@ pub impl Parser { // It's a tuple-like struct. 
is_tuple_like = true; fields = do self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_allowed(token::COMMA) ) |p| { let lo = p.span.lo; @@ -3729,8 +3729,8 @@ pub impl Parser { } else if *self.token == token::LPAREN { all_nullary = false; let arg_tys = self.parse_unspanned_seq( - token::LPAREN, - token::RPAREN, + &token::LPAREN, + &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_ty(false) ); @@ -3982,8 +3982,8 @@ pub impl Parser { token::LPAREN | token::LBRACE => { let ket = token::flip_delimiter(&*self.token); self.parse_unspanned_seq( - *self.token, - ket, + © *self.token, + &ket, seq_sep_none(), |p| p.parse_token_tree() ) @@ -4074,8 +4074,8 @@ pub impl Parser { // foo::bar::{a,b,c} token::LBRACE => { let idents = self.parse_unspanned_seq( - token::LBRACE, - token::RBRACE, + &token::LBRACE, + &token::RBRACE, seq_sep_trailing_allowed(token::COMMA), |p| p.parse_path_list_ident() ); From 380597eba35be0eb12c59180e1eea97264cd3cc7 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 19:04:17 -0800 Subject: [PATCH 17/30] libsyntax: change parse_matcher_subseq to take &Token --- src/libsyntax/parse/parser.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 1e6473c3cb177..fc69605a92720 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1502,9 +1502,9 @@ pub impl Parser { token::LBRACE | token::LPAREN | token::LBRACKET => { self.parse_matcher_subseq( name_idx, - *self.token, + &*self.token, // tjc: not sure why we need a copy - token::flip_delimiter(&*self.token) + &token::flip_delimiter(&*self.token) ) } _ => self.fatal(~"expected open delimiter") @@ -1517,15 +1517,15 @@ pub impl Parser { // invalid. It's similar to common::parse_seq. 
fn parse_matcher_subseq( name_idx: @mut uint, - bra: token::Token, - ket: token::Token + bra: &token::Token, + ket: &token::Token ) -> ~[matcher] { let mut ret_val = ~[]; let mut lparens = 0u; - self.expect(&bra); + self.expect(bra); - while *self.token != ket || lparens > 0u { + while *self.token != *ket || lparens > 0u { if *self.token == token::LPAREN { lparens += 1u; } if *self.token == token::RPAREN { lparens -= 1u; } ret_val.push(self.parse_matcher(name_idx)); @@ -1545,8 +1545,8 @@ pub impl Parser { let name_idx_lo = *name_idx; let ms = self.parse_matcher_subseq( name_idx, - token::LPAREN, - token::RPAREN + &token::LPAREN, + &token::RPAREN ); if ms.len() == 0u { self.fatal(~"repetition body must be nonempty"); From 194f29c20fb86fa163f35e9cd0540ae2b0d41b9d Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 19:27:43 -0800 Subject: [PATCH 18/30] libsyntax: minor cleanup --- src/libsyntax/parse/parser.rs | 99 ++++++++++++++++++++++------------- 1 file changed, 62 insertions(+), 37 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index fc69605a92720..05f6a3e7517a7 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2111,9 +2111,12 @@ pub impl Parser { if *self.token == token::UNDERSCORE { self.bump(); if *self.token != token::RBRACE { - self.fatal(~"expected `}`, found `" + - token_to_str(self.reader, *self.token) + - ~"`"); + self.fatal( + fmt!( + "expected `}`, found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } etc = true; break; @@ -2195,7 +2198,6 @@ pub impl Parser { } _ => pat_uniq(sub) }; - } token::BINOP(token::AND) => { let lo = self.span.lo; @@ -2806,8 +2808,12 @@ pub impl Parser { fn expect_self_ident() { if !self.is_self_ident() { - self.fatal(fmt!("expected `self` but found `%s`", - token_to_str(self.reader, *self.token))); + self.fatal( + fmt!( + "expected `self` but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } self.bump(); } @@ -2874,9 +2880,12 @@ pub impl Parser { args_or_capture_items = ~[]; } _ => { - self.fatal(~"expected `,` or `)`, found `" + - token_to_str(self.reader, *self.token) + - ~"`"); + self.fatal( + fmt!( + "expected `,` or `)`, found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } } } else { @@ -3181,9 +3190,13 @@ pub impl Parser { is_tuple_like = true; fields = ~[]; } else { - self.fatal(fmt!("expected `{`, `(`, or `;` after struct name \ - but found `%s`", - token_to_str(self.reader, *self.token))); + self.fatal( + fmt!( + "expected `{`, `(`, or `;` after struct name \ + but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } let actual_dtor = do the_dtor.map |dtor| { @@ -3218,21 +3231,23 @@ pub impl Parser { let a_var = self.parse_instance_var(vis); match *self.token { - token::SEMI => { - self.obsolete(*self.span, ObsoleteFieldTerminator); - self.bump(); - } - token::COMMA => { - self.bump(); - } - token::RBRACE => {} - _ => { - self.span_fatal(*self.span, - fmt!("expected `;`, `,`, or '}' but \ - found `%s`", - token_to_str(self.reader, - *self.token))); - } + token::SEMI => { + self.obsolete(copy *self.span, ObsoleteFieldTerminator); + self.bump(); + } + token::COMMA => { + self.bump(); + } + token::RBRACE => {} + _ => { + self.span_fatal( + copy *self.span, + fmt!( + "expected `;`, `,`, or '}' but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); + } } a_var } @@ -3316,8 +3331,12 @@ pub impl Parser { module"); } _ => { - self.fatal(~"expected item but found `" + - 
token_to_str(self.reader, *self.token) + ~"`"); + self.fatal( + fmt!( + "expected item but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } } debug!("parse_mod_items: attrs=%?", attrs); @@ -3567,20 +3586,26 @@ pub impl Parser { must_be_named_mod = true; self.expect_keyword(&~"mod"); } else if *self.token != token::LBRACE { - self.span_fatal(*self.span, - fmt!("expected `{` or `mod` but found %s", - token_to_str(self.reader, *self.token))); + self.span_fatal( + copy *self.span, + fmt!( + "expected `{` or `mod` but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } let (sort, ident) = match *self.token { token::IDENT(*) => (ast::named, self.parse_ident()), _ => { if must_be_named_mod { - self.span_fatal(*self.span, - fmt!("expected foreign module name but \ - found %s", - token_to_str(self.reader, - *self.token))); + self.span_fatal( + copy *self.span, + fmt!( + "expected foreign module name but found `%s`", + token_to_str(self.reader, copy *self.token) + ) + ); } (ast::anonymous, From d346b51997a4a5d9f2e85aa41fc3113338b8a83b Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 19:54:37 -0800 Subject: [PATCH 19/30] libsyntax: change token::to_str to take &Token --- src/libsyntax/ext/tt/macro_parser.rs | 4 +-- src/libsyntax/parse/comments.rs | 2 +- src/libsyntax/parse/common.rs | 43 ++++++++++++++-------------- src/libsyntax/parse/parser.rs | 37 +++++++++++++----------- src/libsyntax/parse/token.rs | 4 +-- src/libsyntax/print/pprust.rs | 4 +-- 6 files changed, 50 insertions(+), 44 deletions(-) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 713bf9afcd0f0..82e7de08d6533 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -378,7 +378,7 @@ pub fn parse(sess: @mut ParseSess, nts, next_eis.len())); } else if (bb_eis.len() == 0u && next_eis.len() == 0u) { return failure(sp, ~"No rules expected the token: " - + to_str(rdr.interner(), tok)); + + to_str(rdr.interner(), &tok)); } else if (next_eis.len() > 0u) { /* Now process the next token */ while(next_eis.len() > 0u) { @@ -424,7 +424,7 @@ pub fn parse_nt(p: Parser, name: ~str) -> nonterminal { ~"ident" => match *p.token { token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) } _ => p.fatal(~"expected ident, found " - + token::to_str(p.reader.interner(), copy *p.token)) + + token::to_str(p.reader.interner(), © *p.token)) }, ~"path" => token::nt_path(p.parse_path_with_tps(false)), ~"tt" => { diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 152bd9b0ce417..377b089c532a4 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -359,7 +359,7 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, literals.push(lit {lit: s, pos: sp.lo}); log(debug, ~"tok lit: " + s); } else { - log(debug, ~"tok: " + token::to_str(rdr.interner, tok)); + log(debug, ~"tok: " + token::to_str(rdr.interner, &tok)); } first_read = false; } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 93d3b8b336416..764ff52a303da 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -46,7 +46,7 @@ pub fn seq_sep_none() -> SeqSep { } } -pub fn token_to_str(reader: reader, ++token: token::Token) -> ~str { +pub fn token_to_str(reader: reader, token: &token::Token) -> ~str { token::to_str(reader.interner(), token) } @@ -56,7 +56,7 @@ pub impl Parser { *self.last_span, fmt!( "unexpected token: `%s`", - 
token_to_str(self.reader, t) + token_to_str(self.reader, &t) ) ); } @@ -65,7 +65,7 @@ pub impl Parser { self.fatal( fmt!( "unexpected token: `%s`", - token_to_str(self.reader, *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -77,9 +77,10 @@ pub impl Parser { self.bump(); } else { self.fatal( - fmt!("expected `%s` but found `%s`", - token_to_str(self.reader, *t), - token_to_str(self.reader, *self.token) + fmt!( + "expected `%s` but found `%s`", + token_to_str(self.reader, t), + token_to_str(self.reader, © *self.token) ) ) } @@ -102,7 +103,7 @@ pub impl Parser { self.fatal( fmt!( "expected ident, found `%s`", - token_to_str(self.reader, *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -149,7 +150,7 @@ pub impl Parser { } fn is_keyword(word: &~str) -> bool { - self.token_is_keyword(word, &*self.token) + self.token_is_keyword(word, © *self.token) } fn is_any_keyword(tok: &token::Token) -> bool { @@ -178,7 +179,7 @@ pub impl Parser { fmt!( "expected `%s`, found `%s`", *word, - token_to_str(self.reader, *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -190,11 +191,11 @@ pub impl Parser { fn check_strict_keywords() { match *self.token { - token::IDENT(_, false) => { - let w = token_to_str(self.reader, *self.token); - self.check_strict_keywords_(&w); - } - _ => () + token::IDENT(_, false) => { + let w = token_to_str(self.reader, © *self.token); + self.check_strict_keywords_(&w); + } + _ => () } } @@ -210,11 +211,11 @@ pub impl Parser { fn check_reserved_keywords() { match *self.token { - token::IDENT(_, false) => { - let w = token_to_str(self.reader, *self.token); - self.check_reserved_keywords_(&w); - } - _ => () + token::IDENT(_, false) => { + let w = token_to_str(self.reader, © *self.token); + self.check_reserved_keywords_(&w); + } + _ => () } } @@ -237,9 +238,9 @@ pub impl Parser { ); } else { let mut s: ~str = ~"expected `"; - s += token_to_str(self.reader, token::GT); + s += token_to_str(self.reader, &token::GT); s += ~"`, found `"; - s += token_to_str(self.reader, *self.token); + s += token_to_str(self.reader, © *self.token); s += ~"`"; self.fatal(s); } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 05f6a3e7517a7..d0c6019e09a01 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -460,7 +460,7 @@ pub impl Parser { let hi = p.last_span.hi; debug!("parse_trait_methods(): trait method signature ends in \ `%s`", - token_to_str(p.reader, copy *p.token)); + token_to_str(p.reader, © *p.token)); match *p.token { token::SEMI => { p.bump(); @@ -502,7 +502,7 @@ pub impl Parser { p.fatal( fmt!( "expected `;` or `}` but found `%s`", - token_to_str(p.reader, copy *p.token) + token_to_str(p.reader, © *p.token) ) ); } @@ -848,7 +848,7 @@ pub impl Parser { fmt!( "expected integral vector length \ but found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -1423,7 +1423,7 @@ pub impl Parser { p.fatal( fmt!( "incorrect close delimiter: `%s`", - token_to_str(p.reader, copy *p.token) + token_to_str(p.reader, © *p.token) ) ); } @@ -2114,7 +2114,7 @@ pub impl Parser { self.fatal( fmt!( "expected `}`, found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -2621,10 +2621,15 @@ pub impl Parser { copy t => { if classify::stmt_ends_with_semi(*stmt) { self.fatal( - ~"expected `;` or `}` after \ - expression but found `" - + token_to_str(self.reader, t) - + ~"`"); + fmt!( + "expected `;` or `}` 
after \ + expression but found `%s`", + token_to_str( + self.reader, + &t + ) + ) + ); } stmts.push(stmt); } @@ -2811,7 +2816,7 @@ pub impl Parser { self.fatal( fmt!( "expected `self` but found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -2883,7 +2888,7 @@ pub impl Parser { self.fatal( fmt!( "expected `,` or `)`, found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -3194,7 +3199,7 @@ pub impl Parser { fmt!( "expected `{`, `(`, or `;` after struct name \ but found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -3244,7 +3249,7 @@ pub impl Parser { copy *self.span, fmt!( "expected `;`, `,`, or '}' but found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -3334,7 +3339,7 @@ pub impl Parser { self.fatal( fmt!( "expected item but found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -3590,7 +3595,7 @@ pub impl Parser { copy *self.span, fmt!( "expected `{` or `mod` but found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } @@ -3603,7 +3608,7 @@ pub impl Parser { copy *self.span, fmt!( "expected foreign module name but found `%s`", - token_to_str(self.reader, copy *self.token) + token_to_str(self.reader, © *self.token) ) ); } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 8e88ac1d52545..bc61ce9e7fcd9 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -130,8 +130,8 @@ pub fn binop_to_str(o: binop) -> ~str { } } -pub fn to_str(in: @ident_interner, t: Token) -> ~str { - match t { +pub fn to_str(in: @ident_interner, t: &Token) -> ~str { + match *t { EQ => ~"=", LT => ~"<", LE => ~"<=", diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b4773fe2f97b9..7cf297b324bda 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -777,14 +777,14 @@ pub fn print_tt(s: @ps, tt: ast::token_tree) { match tt { ast::tt_delim(ref tts) => print_tts(s, *tts), ast::tt_tok(_, ref tk) => { - word(s.s, parse::token::to_str(s.intr, (*tk))); + word(s.s, parse::token::to_str(s.intr, tk)); } ast::tt_seq(_, ref tts, ref sep, zerok) => { word(s.s, ~"$("); for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); } word(s.s, ~")"); match (*sep) { - Some(ref tk) => word(s.s, parse::token::to_str(s.intr, (*tk))), + Some(ref tk) => word(s.s, parse::token::to_str(s.intr, tk)), None => () } word(s.s, if zerok { ~"*" } else { ~"+" }); From ff36986fa490917bcacfb4e5010e304d5e82f3bb Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 17:24:28 -0800 Subject: [PATCH 20/30] libsyntax: change token fns to take &Token --- src/libsyntax/parse/comments.rs | 2 +- src/libsyntax/parse/parser.rs | 40 ++++++++++++++++----------------- src/libsyntax/parse/token.rs | 36 ++++++++++++++--------------- 3 files changed, 39 insertions(+), 39 deletions(-) diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 377b089c532a4..4960563db8875 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -354,7 +354,7 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, rdr.next_token(); //discard, and look ahead; we're working with internal state let TokenAndSpan {tok: tok, sp: sp} = rdr.peek(); - if 
token::is_lit(tok) { + if token::is_lit(&tok) { let s = get_str_from(rdr, bstart); literals.push(lit {lit: s, pos: sp.lo}); log(debug, ~"tok lit: " + s); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index d0c6019e09a01..a522612794740 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -656,7 +656,7 @@ pub impl Parser { } else if self.token_is_closure_keyword(© *self.token) { self.parse_ty_closure(None, None) } else if *self.token == token::MOD_SEP - || is_ident_or_path(*self.token) { + || is_ident_or_path(&*self.token) { let path = self.parse_path_with_tps(colons_before_params); ty_path(path, self.get_id()) } else { @@ -760,10 +760,10 @@ pub impl Parser { } } else { 0 }; if offset == 0 { - is_plain_ident(*self.token) + is_plain_ident(&*self.token) && self.look_ahead(1) == token::COLON } else { - is_plain_ident(self.look_ahead(offset)) + is_plain_ident(&self.look_ahead(offset)) && self.look_ahead(offset + 1) == token::COLON } } @@ -1141,7 +1141,7 @@ pub impl Parser { return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); } - } else if token::is_bar(*self.token) { + } else if token::is_bar(&*self.token) { return self.parse_lambda_expr(); } else if self.eat_keyword(&~"if") { return self.parse_if_expr(); @@ -1215,13 +1215,13 @@ pub impl Parser { ex = expr_assert(e); hi = e.span.hi; } else if self.eat_keyword(&~"return") { - if can_begin_expr(*self.token) { + if can_begin_expr(&*self.token) { let e = self.parse_expr(); hi = e.span.hi; ex = expr_ret(Some(e)); } else { ex = expr_ret(None); } } else if self.eat_keyword(&~"break") { - if is_ident(*self.token) { + if is_ident(&*self.token) { ex = expr_break(Some(self.parse_ident())); } else { ex = expr_break(None); @@ -1232,7 +1232,7 @@ pub impl Parser { ex = expr_copy(e); hi = e.span.hi; } else if *self.token == token::MOD_SEP || - is_ident(*self.token) && !self.is_keyword(&~"true") && + is_ident(&*self.token) && !self.is_keyword(&~"true") && !self.is_keyword(&~"false") { let pth = self.parse_path_with_tps(true); @@ -1914,11 +1914,11 @@ pub impl Parser { // loop headers look like 'loop {' or 'loop unsafe {' let is_loop_header = *self.token == token::LBRACE - || (is_ident(*self.token) + || (is_ident(&*self.token) && self.look_ahead(1) == token::LBRACE); // labeled loop headers look like 'loop foo: {' let is_labeled_loop_header = - is_ident(*self.token) + is_ident(&*self.token) && !self.is_any_keyword(© *self.token) && self.look_ahead(1) == token::COLON; @@ -1939,7 +1939,7 @@ pub impl Parser { } else { // This is a 'continue' expression let lo = self.span.lo; - let ex = if is_ident(*self.token) { + let ex = if is_ident(&*self.token) { expr_again(Some(self.parse_ident())) } else { expr_again(None) @@ -1954,7 +1954,7 @@ pub impl Parser { let lookahead = self.look_ahead(1); *self.token == token::LBRACE && (self.token_is_keyword(&~"mut", &lookahead) || - (is_plain_ident(lookahead) && + (is_plain_ident(&lookahead) && self.look_ahead(2) == token::COLON)) } @@ -2260,7 +2260,7 @@ pub impl Parser { pat = ast::pat_vec(elements, tail); } copy tok => { - if !is_ident_or_path(tok) + if !is_ident_or_path(&tok) || self.is_keyword(&~"true") || self.is_keyword(&~"false") { @@ -2290,7 +2290,7 @@ pub impl Parser { cannot_be_enum_or_struct = true } - if is_plain_ident(*self.token) && cannot_be_enum_or_struct { + if is_plain_ident(&*self.token) && cannot_be_enum_or_struct { let name = self.parse_value_path(); let sub; if self.eat(&token::AT) { @@ -2359,7 +2359,7 @@ pub impl Parser { fn 
parse_pat_ident(refutable: bool, binding_mode: ast::binding_mode) -> ast::pat_ { - if !is_plain_ident(*self.token) { + if !is_plain_ident(&*self.token) { self.span_fatal( *self.last_span, ~"expected identifier, found path"); @@ -2425,7 +2425,7 @@ pub impl Parser { if self.eat_keyword(&~"mut") { is_mutbl = struct_mutable; } - if !is_plain_ident(*self.token) { + if !is_plain_ident(&*self.token) { self.fatal(~"expected ident"); } let name = self.parse_ident(); @@ -2454,7 +2454,7 @@ pub impl Parser { self.expect_keyword(&~"let"); let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); - } else if is_ident(*self.token) + } else if is_ident(&*self.token) && !self.is_any_keyword(© *self.token) && self.look_ahead(1) == token::NOT { @@ -2716,7 +2716,7 @@ pub impl Parser { ~"`&static` is the only permissible \ region bound here"); } - } else if is_ident(*self.token) { + } else if is_ident(&*self.token) { let maybe_bound = match *self.token { token::IDENT(copy sid, _) => { match *self.id_to_str(sid) { @@ -2757,7 +2757,7 @@ pub impl Parser { loop; } - if is_ident_or_path(*self.token) { + if is_ident_or_path(&*self.token) { self.obsolete(*self.span, ObsoleteTraitBoundSeparator); } @@ -3987,7 +3987,7 @@ pub impl Parser { }); } else if macros_allowed && !self.is_any_keyword(© *self.token) && self.look_ahead(1) == token::NOT - && (is_plain_ident(self.look_ahead(2)) + && (is_plain_ident(&self.look_ahead(2)) || self.look_ahead(2) == token::LPAREN || self.look_ahead(2) == token::LBRACE) { // MACRO INVOCATION ITEM @@ -4002,7 +4002,7 @@ pub impl Parser { // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax // and remove this. - let id = if is_plain_ident(*self.token) { + let id = if is_plain_ident(&*self.token) { self.parse_ident() } else { token::special_idents::invalid // no special identifier diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index bc61ce9e7fcd9..bb1f8f1d1d9a2 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -225,8 +225,8 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str { } } -pub pure fn can_begin_expr(t: Token) -> bool { - match t { +pub pure fn can_begin_expr(t: &Token) -> bool { + match *t { LPAREN => true, LBRACE => true, LBRACKET => true, @@ -259,20 +259,20 @@ pub pure fn can_begin_expr(t: Token) -> bool { /// what's the opposite delimiter? 
pub fn flip_delimiter(t: &token::Token) -> token::Token { match *t { - token::LPAREN => token::RPAREN, - token::LBRACE => token::RBRACE, - token::LBRACKET => token::RBRACKET, - token::RPAREN => token::LPAREN, - token::RBRACE => token::LBRACE, - token::RBRACKET => token::LBRACKET, + LPAREN => RPAREN, + LBRACE => RBRACE, + LBRACKET => RBRACKET, + RPAREN => LPAREN, + RBRACE => LBRACE, + RBRACKET => LBRACKET, _ => fail!() } } -pub fn is_lit(t: Token) -> bool { - match t { +pub fn is_lit(t: &Token) -> bool { + match *t { LIT_INT(_, _) => true, LIT_UINT(_, _) => true, LIT_INT_UNSUFFIXED(_) => true, @@ -283,23 +283,23 @@ pub fn is_lit(t: Token) -> bool { } } -pub pure fn is_ident(t: Token) -> bool { - match t { IDENT(_, _) => true, _ => false } +pub pure fn is_ident(t: &Token) -> bool { + match *t { IDENT(_, _) => true, _ => false } } -pub pure fn is_ident_or_path(t: Token) -> bool { - match t { +pub pure fn is_ident_or_path(t: &Token) -> bool { + match *t { IDENT(_, _) | INTERPOLATED(nt_path(*)) => true, _ => false } } -pub pure fn is_plain_ident(t: Token) -> bool { - match t { IDENT(_, false) => true, _ => false } +pub pure fn is_plain_ident(t: &Token) -> bool { + match *t { IDENT(_, false) => true, _ => false } } -pub pure fn is_bar(t: Token) -> bool { - match t { BINOP(OR) | OROR => true, _ => false } +pub pure fn is_bar(t: &Token) -> bool { + match *t { BINOP(OR) | OROR => true, _ => false } } From 28691a0852854a9996d07ab92bb55e92beef2c98 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 20:51:56 -0800 Subject: [PATCH 21/30] libsyntax: more minor cleanup --- src/libsyntax/parse/attr.rs | 4 +-- src/libsyntax/parse/common.rs | 8 +++--- src/libsyntax/parse/parser.rs | 52 +++++++++++++++++------------------ 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index bad6c76c36a2b..56e350db47ba1 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -161,8 +161,8 @@ impl parser_attr for Parser { fn parse_optional_meta() -> ~[@ast::meta_item] { match *self.token { - token::LPAREN => return self.parse_meta_seq(), - _ => return ~[] + token::LPAREN => self.parse_meta_seq(), + _ => ~[] } } } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 764ff52a303da..4632ceb6e7d40 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -139,8 +139,8 @@ pub impl Parser { fn token_is_word(word: &~str, tok: &token::Token) -> bool { match *tok { - token::IDENT(sid, false) => { *self.id_to_str(sid) == *word } - _ => { false } + token::IDENT(sid, false) => { *self.id_to_str(sid) == *word } + _ => { false } } } @@ -165,8 +165,8 @@ pub impl Parser { fn eat_keyword(word: &~str) -> bool { self.require_keyword(word); let is_kw = match *self.token { - token::IDENT(sid, false) => *word == *self.id_to_str(sid), - _ => false + token::IDENT(sid, false) => *word == *self.id_to_str(sid), + _ => false }; if is_kw { self.bump() } is_kw diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index a522612794740..9b825512c39d9 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -633,7 +633,7 @@ pub impl Parser { seq_sep_trailing_allowed(token::COMMA), |p| p.parse_ty_field() ); - if vec::len(elems) == 0u { + if elems.len() == 0 { self.unexpected_last(token::RBRACE); } ty_rec(elems) @@ -858,17 +858,17 @@ pub impl Parser { } } - fn lit_from_token(tok: token::Token) -> lit_ { - match tok { - token::LIT_INT(i, it) => 
lit_int(i, it), - token::LIT_UINT(u, ut) => lit_uint(u, ut), - token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i), - token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft), - token::LIT_FLOAT_UNSUFFIXED(s) => - lit_float_unsuffixed(self.id_to_str(s)), - token::LIT_STR(s) => lit_str(self.id_to_str(s)), - token::LPAREN => { self.expect(&token::RPAREN); lit_nil }, - _ => { self.unexpected_last(tok); } + fn lit_from_token(tok: &token::Token) -> lit_ { + match *tok { + token::LIT_INT(i, it) => lit_int(i, it), + token::LIT_UINT(u, ut) => lit_uint(u, ut), + token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i), + token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft), + token::LIT_FLOAT_UNSUFFIXED(s) => + lit_float_unsuffixed(self.id_to_str(s)), + token::LIT_STR(s) => lit_str(self.id_to_str(s)), + token::LPAREN => { self.expect(&token::RPAREN); lit_nil }, + _ => { self.unexpected_last(*tok); } } } @@ -882,7 +882,7 @@ pub impl Parser { // XXX: This is a really bad copy! let tok = copy *self.token; self.bump(); - self.lit_from_token(tok) + self.lit_from_token(&tok) }; codemap::spanned { node: lit, span: mk_sp(lo, self.last_span.hi) } } @@ -1240,8 +1240,8 @@ pub impl Parser { if *self.token == token::NOT { self.bump(); match *self.token { - token::LPAREN | token::LBRACE => {} - _ => self.fatal(~"expected open delimiter") + token::LPAREN | token::LBRACE => {} + _ => self.fatal(~"expected open delimiter") }; let ket = token::flip_delimiter(&*self.token); @@ -2554,7 +2554,8 @@ pub impl Parser { self.expect(&token::LBRACE); let (inner, next) = maybe_parse_inner_attrs_and_next(self, parse_attrs); - return (inner, self.parse_block_tail_(lo, default_blk, next)); + + (inner, self.parse_block_tail_(lo, default_blk, next)) } fn parse_block_no_value() -> blk { @@ -2624,10 +2625,7 @@ pub impl Parser { fmt!( "expected `;` or `}` after \ expression but found `%s`", - token_to_str( - self.reader, - &t - ) + token_to_str(self.reader, &t) ) ); } @@ -2823,12 +2821,14 @@ pub impl Parser { self.bump(); } - fn parse_fn_decl_with_self(parse_arg_fn: - fn(Parser) -> arg_or_capture_item) - -> (self_ty, fn_decl) { - - fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_, - p: Parser) -> ast::self_ty_ { + fn parse_fn_decl_with_self( + parse_arg_fn: + fn(Parser) -> arg_or_capture_item + ) -> (self_ty, fn_decl) { + fn maybe_parse_self_ty( + cnstr: fn(+v: mutability) -> ast::self_ty_, + p: Parser + ) -> ast::self_ty_ { // We need to make sure it isn't a mode or a type if p.token_is_keyword(&~"self", &p.look_ahead(1)) || ((p.token_is_keyword(&~"const", &p.look_ahead(1)) || From 34c02a6c0ead2896ecc68aa92dd39cd62aee5aea Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 21:20:50 -0800 Subject: [PATCH 22/30] libsyntax: change Parser::unexpected_last to take &Token --- src/libsyntax/parse/common.rs | 4 ++-- src/libsyntax/parse/parser.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 4632ceb6e7d40..af3779b3f39c5 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -51,12 +51,12 @@ pub fn token_to_str(reader: reader, token: &token::Token) -> ~str { } pub impl Parser { - fn unexpected_last(t: token::Token) -> ! { + fn unexpected_last(t: &token::Token) -> ! 
{ self.span_fatal( *self.last_span, fmt!( "unexpected token: `%s`", - token_to_str(self.reader, &t) + token_to_str(self.reader, t) ) ); } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 9b825512c39d9..7f49cf4ae18cc 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -634,7 +634,7 @@ pub impl Parser { |p| p.parse_ty_field() ); if elems.len() == 0 { - self.unexpected_last(token::RBRACE); + self.unexpected_last(&token::RBRACE); } ty_rec(elems) } else if *self.token == token::LBRACKET { @@ -868,7 +868,7 @@ pub impl Parser { lit_float_unsuffixed(self.id_to_str(s)), token::LIT_STR(s) => lit_str(self.id_to_str(s)), token::LPAREN => { self.expect(&token::RPAREN); lit_nil }, - _ => { self.unexpected_last(*tok); } + _ => { self.unexpected_last(tok); } } } From de6d9f66b514d6a0e6cc436a942218c3ab4c8e57 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 21:22:43 -0800 Subject: [PATCH 23/30] libsyntax: change token_is_word to take &Token --- src/libsyntax/parse/common.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index af3779b3f39c5..a426d6bba0503 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -137,7 +137,7 @@ pub impl Parser { } } - fn token_is_word(word: &~str, tok: &token::Token) -> bool { + pure fn token_is_word(word: &~str, tok: &token::Token) -> bool { match *tok { token::IDENT(sid, false) => { *self.id_to_str(sid) == *word } _ => { false } From 375c2982971662a26afda5e2aac437ccf81a9872 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 17:02:04 -0800 Subject: [PATCH 24/30] libsyntax: change binop_to_str to be pure --- src/libsyntax/parse/token.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index bb1f8f1d1d9a2..2b9df61120eeb 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -115,7 +115,7 @@ pub enum nonterminal { nt_matchers(~[ast::matcher]) } -pub fn binop_to_str(o: binop) -> ~str { +pub pure fn binop_to_str(o: binop) -> ~str { match o { PLUS => ~"+", MINUS => ~"-", From 8d239a256d39eb2527ddd40e34d14a2e1801fb61 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Tue, 26 Feb 2013 06:28:14 -0800 Subject: [PATCH 25/30] libsyntax: change closures to take fn(&Parser) --- src/libsyntax/parse/common.rs | 18 ++++++------- src/libsyntax/parse/parser.rs | 48 +++++++++++++++++------------------ 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index a426d6bba0503..28c5bf721a187 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -250,7 +250,7 @@ pub impl Parser { // before the '>'. 
fn parse_seq_to_before_gt( sep: Option, - f: fn(Parser) -> T + f: fn(&Parser) -> T ) -> ~[T] { let mut first = true; let mut v = ~[]; @@ -263,7 +263,7 @@ pub impl Parser { } _ => () } - v.push(f(self)); + v.push(f(&self)); } return v; @@ -271,7 +271,7 @@ pub impl Parser { fn parse_seq_to_gt( sep: Option, - f: fn(Parser) -> T + f: fn(&Parser) -> T ) -> ~[T] { let v = self.parse_seq_to_before_gt(sep, f); self.expect_gt(); @@ -282,7 +282,7 @@ pub impl Parser { // parse a sequence bracketed by '<' and '>' fn parse_seq_lt_gt( sep: Option, - f: fn(Parser) -> T + f: fn(&Parser) -> T ) -> spanned<~[T]> { let lo = self.span.lo; self.expect(&token::LT); @@ -298,7 +298,7 @@ pub impl Parser { fn parse_seq_to_end( ket: &token::Token, sep: SeqSep, - f: fn(Parser) -> T + f: fn(&Parser) -> T ) -> ~[T] { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); @@ -311,7 +311,7 @@ pub impl Parser { fn parse_seq_to_before_end( ket: &token::Token, sep: SeqSep, - f: fn(Parser) -> T + f: fn(&Parser) -> T ) -> ~[T] { let mut first: bool = true; let mut v: ~[T] = ~[]; @@ -324,7 +324,7 @@ pub impl Parser { _ => () } if sep.trailing_sep_allowed && *self.token == *ket { break; } - v.push(f(self)); + v.push(f(&self)); } return v; } @@ -336,7 +336,7 @@ pub impl Parser { bra: &token::Token, ket: &token::Token, sep: SeqSep, - f: fn(Parser) -> T + f: fn(&Parser) -> T ) -> ~[T] { self.expect(bra); let result = self.parse_seq_to_before_end(ket, sep, f); @@ -350,7 +350,7 @@ pub impl Parser { bra: &token::Token, ket: &token::Token, sep: SeqSep, - f: fn(Parser) -> T + f: fn(&Parser) -> T ) -> spanned<~[T]> { let lo = self.span.lo; self.expect(bra); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7f49cf4ae18cc..694e83879588a 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -768,7 +768,7 @@ pub impl Parser { } } - fn parse_capture_item_or(parse_arg_fn: fn(Parser) -> arg_or_capture_item) + fn parse_capture_item_or(parse_arg_fn: fn(&Parser) -> arg_or_capture_item) -> arg_or_capture_item { if self.eat_keyword(&~"copy") { @@ -776,7 +776,7 @@ pub impl Parser { self.parse_ident(); either::Right(()) } else { - parse_arg_fn(self) + parse_arg_fn(&self) } } @@ -893,8 +893,8 @@ pub impl Parser { } fn parse_path_without_tps_( - parse_ident: fn(Parser) -> ident, - parse_last_ident: fn(Parser) -> ident) -> @path { + parse_ident: fn(&Parser) -> ident, + parse_last_ident: fn(&Parser) -> ident) -> @path { maybe_whole!(self, nt_path); let lo = self.span.lo; @@ -906,10 +906,10 @@ pub impl Parser { && self.look_ahead(1u) == token::MOD_SEP; if is_not_last { - ids.push(parse_ident(self)); + ids.push(parse_ident(&self)); self.expect(&token::MOD_SEP); } else { - ids.push(parse_last_ident(self)); + ids.push(parse_last_ident(&self)); break; } } @@ -1415,7 +1415,7 @@ pub impl Parser { fn parse_token_tree() -> token_tree { maybe_whole!(deref self, nt_tt); - fn parse_non_delim_tt_tok(p: Parser) -> token_tree { + fn parse_non_delim_tt_tok(p: &Parser) -> token_tree { maybe_whole!(deref p, nt_tt); match *p.token { token::RPAREN | token::RBRACE | token::RBRACKET @@ -1452,7 +1452,7 @@ pub impl Parser { } // turn the next token into a tt_tok: - fn parse_any_tt_tok(p: Parser) -> token_tree{ + fn parse_any_tt_tok(p: &Parser) -> token_tree{ let res = tt_tok(*p.span, *p.token); p.bump(); res @@ -1468,7 +1468,7 @@ pub impl Parser { tt_delim( vec::append( // the open delimiter: - ~[parse_any_tt_tok(self)], + ~[parse_any_tt_tok(&self)], vec::append( self.parse_seq_to_before_end( &ket, @@ 
-1476,12 +1476,12 @@ pub impl Parser { |p| p.parse_token_tree() ), // the close delimiter: - ~[parse_any_tt_tok(self)] + ~[parse_any_tt_tok(&self)] ) ) ) } - _ => parse_non_delim_tt_tok(self) + _ => parse_non_delim_tt_tok(&self) } } @@ -2441,7 +2441,7 @@ pub impl Parser { fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt { maybe_whole!(self, nt_stmt); - fn check_expected_item(p: Parser, current_attrs: ~[attribute]) { + fn check_expected_item(p: &Parser, current_attrs: ~[attribute]) { // If we have attributes then we should have an item if !current_attrs.is_empty() { p.fatal(~"expected item after attrs"); @@ -2450,7 +2450,7 @@ pub impl Parser { let lo = self.span.lo; if self.is_keyword(&~"let") { - check_expected_item(self, first_item_attrs); + check_expected_item(&self, first_item_attrs); self.expect_keyword(&~"let"); let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); @@ -2458,7 +2458,7 @@ pub impl Parser { && !self.is_any_keyword(© *self.token) && self.look_ahead(1) == token::NOT { - check_expected_item(self, first_item_attrs); + check_expected_item(&self, first_item_attrs); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... @@ -2514,7 +2514,7 @@ pub impl Parser { iovi_none() => { /* fallthrough */ } } - check_expected_item(self, item_attrs); + check_expected_item(&self, item_attrs); // Remainder are line-expr stmts. let e = self.parse_expr_res(RESTRICT_STMT_EXPR); @@ -2538,7 +2538,7 @@ pub impl Parser { maybe_whole!(pair_empty self, nt_block); - fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) -> + fn maybe_parse_inner_attrs_and_next(p: &Parser, parse_attrs: bool) -> (~[attribute], ~[attribute]) { if parse_attrs { p.parse_inner_attrs_and_next() @@ -2553,7 +2553,7 @@ pub impl Parser { } self.expect(&token::LBRACE); let (inner, next) = - maybe_parse_inner_attrs_and_next(self, parse_attrs); + maybe_parse_inner_attrs_and_next(&self, parse_attrs); (inner, self.parse_block_tail_(lo, default_blk, next)) } @@ -2780,7 +2780,7 @@ pub impl Parser { } else { ~[] } } - fn parse_fn_decl(parse_arg_fn: fn(Parser) -> arg_or_capture_item) + fn parse_fn_decl(parse_arg_fn: fn(&Parser) -> arg_or_capture_item) -> fn_decl { let args_or_capture_items: ~[arg_or_capture_item] = @@ -2823,11 +2823,11 @@ pub impl Parser { fn parse_fn_decl_with_self( parse_arg_fn: - fn(Parser) -> arg_or_capture_item + fn(&Parser) -> arg_or_capture_item ) -> (self_ty, fn_decl) { fn maybe_parse_self_ty( cnstr: fn(+v: mutability) -> ast::self_ty_, - p: Parser + p: &Parser ) -> ast::self_ty_ { // We need to make sure it isn't a mode or a type if p.token_is_keyword(&~"self", &p.look_ahead(1)) || @@ -2851,13 +2851,13 @@ pub impl Parser { let lo = self.span.lo; let self_ty = match *self.token { token::BINOP(token::AND) => { - maybe_parse_self_ty(sty_region, self) + maybe_parse_self_ty(sty_region, &self) } token::AT => { - maybe_parse_self_ty(sty_box, self) + maybe_parse_self_ty(sty_box, &self) } token::TILDE => { - maybe_parse_self_ty(sty_uniq, self) + maybe_parse_self_ty(sty_uniq, &self) } token::IDENT(*) if self.is_self_ident() => { self.bump(); @@ -3028,7 +3028,7 @@ pub impl Parser { // impl ~[T] : to_str { ... } // impl to_str for ~[T] { ... 
} fn parse_item_impl() -> item_info { - fn wrap_path(p: Parser, pt: @path) -> @Ty { + fn wrap_path(p: &Parser, pt: @path) -> @Ty { @Ty { id: p.get_id(), node: ty_path(pt, p.get_id()), From 4ae91e2961ac7be50a346a6b0d724601878a9cd0 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Tue, 26 Feb 2013 06:35:36 -0800 Subject: [PATCH 26/30] libsyntax: add explicit copies --- src/libsyntax/parse/attr.rs | 12 +++- src/libsyntax/parse/comments.rs | 10 +-- src/libsyntax/parse/mod.rs | 21 ++++--- src/libsyntax/parse/parser.rs | 105 ++++++++++++++++++++------------ src/libsyntax/parse/token.rs | 14 ++--- 5 files changed, 101 insertions(+), 61 deletions(-) diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 56e350db47ba1..a52a9f3ba5e93 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -46,7 +46,10 @@ impl parser_attr for Parser { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - *self.id_to_str(s), self.span.lo, self.span.hi); + copy *self.id_to_str(s), + self.span.lo, + self.span.hi + ); if attr.node.style != ast::attr_outer { self.fatal(~"expected outer comment"); } @@ -113,7 +116,10 @@ impl parser_attr for Parser { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - *self.id_to_str(s), self.span.lo, self.span.hi); + copy *self.id_to_str(s), + self.span.lo, + self.span.hi + ); self.bump(); if attr.node.style == ast::attr_inner { inner_attrs += ~[attr]; @@ -151,7 +157,7 @@ impl parser_attr for Parser { } fn parse_meta_seq() -> ~[@ast::meta_item] { - self.parse_seq( + copy self.parse_seq( &token::LPAREN, &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 4960563db8875..714ae9a0fd520 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -221,7 +221,7 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str], if col < len { s1 = str::slice(s, col, len); } else { s1 = ~""; } - } else { s1 = s; } + } else { s1 = /*bad*/ copy s; } log(debug, ~"pushing line: " + s1); lines.push(s1); } @@ -321,7 +321,7 @@ pub struct lit { } pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, - path: ~str, + +path: ~str, srdr: io::Reader) -> (~[cmnt], ~[lit]) { let src = @str::from_bytes(srdr.read_whole_stream()); let itr = parse::token::mk_fake_ident_interner(); @@ -356,10 +356,10 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, let TokenAndSpan {tok: tok, sp: sp} = rdr.peek(); if token::is_lit(&tok) { let s = get_str_from(rdr, bstart); - literals.push(lit {lit: s, pos: sp.lo}); - log(debug, ~"tok lit: " + s); + literals.push(lit {lit: /*bad*/ copy s, pos: sp.lo}); + debug!("tok lit: %s", s); } else { - log(debug, ~"tok: " + token::to_str(rdr.interner, &tok)); + debug!("tok: %s", token::to_str(rdr.interner, &tok)); } first_read = false; } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 5fa6115938506..cf5cb847018ac 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -92,7 +92,7 @@ pub fn parse_crate_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg, sess: @mut ParseSess) -> @ast::crate { - let p = new_parser_from_source_str(sess, cfg, name, + let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name, codemap::FssNone, source); let r = p.parse_crate_mod(cfg); p.abort_if_errors(); @@ -103,7 +103,7 @@ pub fn parse_expr_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg, 
sess: @mut ParseSess) -> @ast::expr { - let p = new_parser_from_source_str(sess, cfg, name, + let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name, codemap::FssNone, source); let r = p.parse_expr(); p.abort_if_errors(); @@ -116,7 +116,7 @@ pub fn parse_item_from_source_str(name: ~str, +attrs: ~[ast::attribute], sess: @mut ParseSess) -> Option<@ast::item> { - let p = new_parser_from_source_str(sess, cfg, name, + let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name, codemap::FssNone, source); let r = p.parse_item(attrs); p.abort_if_errors(); @@ -128,7 +128,7 @@ pub fn parse_stmt_from_source_str(name: ~str, cfg: ast::crate_cfg, +attrs: ~[ast::attribute], sess: @mut ParseSess) -> @ast::stmt { - let p = new_parser_from_source_str(sess, cfg, name, + let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name, codemap::FssNone, source); let r = p.parse_stmt(attrs); p.abort_if_errors(); @@ -139,7 +139,7 @@ pub fn parse_tts_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg, sess: @mut ParseSess) -> ~[ast::token_tree] { - let p = new_parser_from_source_str(sess, cfg, name, + let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name, codemap::FssNone, source); *p.quote_depth += 1u; let r = p.parse_all_token_trees(); @@ -153,8 +153,13 @@ pub fn parse_from_source_str(f: fn (p: Parser) -> T, sess: @mut ParseSess) -> T { - let p = new_parser_from_source_str(sess, cfg, name, ss, - source); + let p = new_parser_from_source_str( + sess, + cfg, + /*bad*/ copy name, + /*bad*/ copy ss, + source + ); let r = f(p); if !p.reader.is_eof() { p.reader.fatal(~"expected end-of-string"); @@ -226,7 +231,7 @@ pub fn new_sub_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg, } pub fn new_parser_from_tts(sess: @mut ParseSess, cfg: ast::crate_cfg, - tts: ~[ast::token_tree]) -> Parser { + +tts: ~[ast::token_tree]) -> Parser { let trdr = lexer::new_tt_reader(copy sess.span_diagnostic, sess.interner, None, tts); return Parser(sess, cfg, trdr as reader) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 694e83879588a..2c7947090c243 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -147,16 +147,25 @@ macro_rules! maybe_whole_expr ( ) macro_rules! maybe_whole ( - ($p:expr, $constructor:ident) => ( match *$p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; } + ($p:expr, $constructor:ident) => ( match copy *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); + return x; + } _ => () }) ; - (deref $p:expr, $constructor:ident) => ( match *$p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return *x; } + (deref $p:expr, $constructor:ident) => ( match copy *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); + return copy *x; + } _ => () }) ; - (Some $p:expr, $constructor:ident) => ( match *$p.token { - INTERPOLATED(token::$constructor(x)) => { $p.bump(); return Some(x); } + (Some $p:expr, $constructor:ident) => ( match copy *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); + return Some(x); + } _ => () }) ; (iovi $p:expr, $constructor:ident) => ( match *$p.token { @@ -166,9 +175,10 @@ macro_rules! 
maybe_whole ( } _ => () }) ; - (pair_empty $p:expr, $constructor:ident) => ( match *$p.token { + (pair_empty $p:expr, $constructor:ident) => ( match copy *$p.token { INTERPOLATED(token::$constructor(x)) => { - $p.bump(); return (~[], x); + $p.bump(); + return (~[], x); } _ => () }) @@ -612,8 +622,11 @@ pub impl Parser { one_tuple = true; } } - let t = if ts.len() == 1 && !one_tuple { ts[0].node } - else { ty_tup(ts) }; + let t = if ts.len() == 1 && !one_tuple { + copy ts[0].node + } else { + ty_tup(ts) + }; self.expect(&token::RPAREN); t } @@ -972,7 +985,7 @@ pub impl Parser { @ast::path { span: mk_sp(lo, hi), rp: rp, types: tps, - .. *path } + .. copy *path } } fn parse_opt_lifetime() -> Option { @@ -1440,7 +1453,12 @@ pub impl Parser { |p| p.parse_token_tree() ); let (s, z) = p.parse_sep_and_zerok(); - tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z) + tt_seq( + mk_sp(sp.lo ,p.span.hi), + /*bad*/ copy seq.node, + s, + z + ) } else { tt_nonterminal(sp, p.parse_ident()) } @@ -1453,7 +1471,7 @@ pub impl Parser { // turn the next token into a tt_tok: fn parse_any_tt_tok(p: &Parser) -> token_tree{ - let res = tt_tok(*p.span, *p.token); + let res = tt_tok(*p.span, copy *p.token); p.bump(); res } @@ -1562,7 +1580,7 @@ pub impl Parser { m } } else { - let m = match_tok(*self.token); + let m = match_tok(copy *self.token); self.bump(); m }; @@ -1665,7 +1683,7 @@ pub impl Parser { fn parse_more_binops(lhs: @expr, min_prec: uint) -> @expr { if self.expr_is_complete(lhs) { return lhs; } - let peeked = *self.token; + let peeked = copy *self.token; if peeked == token::BINOP(token::OR) && (*self.restriction == RESTRICT_NO_BAR_OP || *self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) { @@ -1859,7 +1877,7 @@ pub impl Parser { // Turn on the restriction to stop at | or || so we can parse // them as the lambda arguments let e = self.parse_expr_res(RESTRICT_NO_BAR_OR_DOUBLEBAR_OP); - match e.node { + match /*bad*/ copy e.node { expr_call(f, args, NoSugar) => { let block = self.parse_lambda_block_expr(); let last_arg = self.mk_expr(block.span.lo, block.span.hi, @@ -2441,7 +2459,7 @@ pub impl Parser { fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt { maybe_whole!(self, nt_stmt); - fn check_expected_item(p: &Parser, current_attrs: ~[attribute]) { + fn check_expected_item(p: &Parser, current_attrs: &[attribute]) { // If we have attributes then we should have an item if !current_attrs.is_empty() { p.fatal(~"expected item after attrs"); @@ -2497,7 +2515,7 @@ pub impl Parser { let item_attrs = vec::append(first_item_attrs, self.parse_outer_attributes()); - match self.parse_item_or_view_item(item_attrs, + match self.parse_item_or_view_item(/*bad*/ copy item_attrs, true, false, false) { iovi_item(i) => { let mut hi = i.span.hi; @@ -2614,7 +2632,7 @@ pub impl Parser { self.bump(); stmts.push(@codemap::spanned { node: stmt_semi(e, stmt_id), - .. *stmt}); + .. copy *stmt}); } token::RBRACE => { expr = Some(e); @@ -2640,8 +2658,8 @@ pub impl Parser { token::SEMI => { self.bump(); stmts.push(@codemap::spanned { - node: stmt_mac((*m), true), - .. *stmt}); + node: stmt_mac(copy *m, true), + .. 
copy *stmt}); } token::RBRACE => { // if a block ends in `m!(arg)` without @@ -2649,7 +2667,7 @@ pub impl Parser { expr = Some( self.mk_mac_expr(stmt.span.lo, stmt.span.hi, - (*m).node)); + copy m.node)); } _ => { stmts.push(stmt); } } @@ -2990,6 +3008,7 @@ pub impl Parser { let self_ty = if is_static { static_sty} else { self_ty }; let (inner_attrs, body) = self.parse_inner_attrs_and_block(true); + let hi = body.span.hi; let attrs = vec::append(attrs, inner_attrs); @ast::method { ident: ident, @@ -3000,7 +3019,7 @@ pub impl Parser { decl: decl, body: body, id: self.get_id(), - span: mk_sp(lo, body.span.hi), + span: mk_sp(lo, hi), self_id: self.get_id(), vis: visa, } @@ -3161,7 +3180,7 @@ pub impl Parser { declared here"); } None => { - the_dtor = Some(((*blk), (*attrs), s)); + the_dtor = Some((copy *blk, copy *attrs, s)); } } } @@ -3205,7 +3224,7 @@ pub impl Parser { } let actual_dtor = do the_dtor.map |dtor| { - let (d_body, d_attrs, d_s) = *dtor; + let (d_body, d_attrs, d_s) = copy *dtor; codemap::spanned { node: ast::struct_dtor_ { id: self.get_id(), attrs: d_attrs, self_id: self.get_id(), @@ -3257,7 +3276,7 @@ pub impl Parser { a_var } - fn parse_dtor(attrs: ~[attribute]) -> class_contents { + fn parse_dtor(+attrs: ~[attribute]) -> class_contents { let lo = self.last_span.lo; let body = self.parse_block(); dtor_decl(body, attrs, mk_sp(lo, self.last_span.hi)) @@ -3323,12 +3342,17 @@ pub impl Parser { while *self.token != term { let mut attrs = self.parse_outer_attributes(); if first { - attrs = vec::append(attrs_remaining, attrs); + attrs = vec::append(/*bad*/ copy attrs_remaining, attrs); first = false; } debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)", attrs); - match self.parse_item_or_view_item(attrs, true, false, true) { + match self.parse_item_or_view_item( + /*bad*/ copy attrs, + true, + false, + true + ) { iovi_item(item) => items.push(item), iovi_view_item(view_item) => { self.span_fatal(view_item.span, ~"view items must be \ @@ -3456,7 +3480,7 @@ pub impl Parser { outer_attrs, id_sp) } - fn eval_src_mod_from_path(prefix: Path, path: Path, + fn eval_src_mod_from_path(prefix: Path, +path: Path, outer_attrs: ~[ast::attribute], id_sp: span ) -> (ast::item_, ~[ast::attribute]) { @@ -3471,12 +3495,15 @@ pub impl Parser { new_sub_parser_from_file(self.sess, self.cfg, &full_path, id_sp); let (inner, next) = p0.parse_inner_attrs_and_next(); - let mod_attrs = vec::append(outer_attrs, inner); + let mod_attrs = vec::append( + /*bad*/ copy outer_attrs, + inner + ); let first_item_outer_attrs = next; let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); return (ast::item_mod(m0), mod_attrs); - fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str { + fn cdir_path_opt(+default: ~str, attrs: ~[ast::attribute]) -> ~str { match ::attr::first_attr_value_str_by_name(attrs, ~"path") { Some(d) => copy *d, None => default @@ -3631,8 +3658,9 @@ pub impl Parser { self.expect(&token::RBRACE); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, - item_foreign_mod(m), visibility, - maybe_append(attrs, Some(inner)))); + item_foreign_mod(m), visibility, + maybe_append(/*bad*/ copy attrs, + Some(inner)))); } match abi_opt { @@ -3648,7 +3676,7 @@ pub impl Parser { self.expect(&token::SEMI); iovi_view_item(@ast::view_item { node: view_item_extern_mod(ident, metadata, self.get_id()), - attrs: attrs, + attrs: copy attrs, vis: visibility, span: mk_sp(lo, self.last_span.hi) }) @@ -3691,7 +3719,7 @@ pub impl Parser { declared here"); } None => { - the_dtor = 
Some(((*blk), (*attrs), s)); + the_dtor = Some((copy *blk, copy *attrs, s)); } } } @@ -3704,7 +3732,7 @@ pub impl Parser { } self.bump(); let mut actual_dtor = do the_dtor.map |dtor| { - let (d_body, d_attrs, d_s) = *dtor; + let (d_body, d_attrs, d_s) = copy *dtor; codemap::spanned { node: ast::struct_dtor_ { id: self.get_id(), attrs: d_attrs, self_id: self.get_id(), @@ -4216,7 +4244,8 @@ pub impl Parser { let mut (view_items, items, foreign_items) = (~[], ~[], ~[]); loop { - match self.parse_item_or_view_item(attrs, items_allowed, + match self.parse_item_or_view_item(/*bad*/ copy attrs, + items_allowed, foreign_items_allowed, macros_allowed) { iovi_none => @@ -4265,7 +4294,7 @@ pub impl Parser { @spanned(lo, self.span.lo, ast::crate_ { module: m, attrs: inner, - config: self.cfg }) + config: copy self.cfg }) } fn parse_str() -> @~str { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 2b9df61120eeb..8b063314c9b07 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -179,14 +179,14 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str { } LIT_INT_UNSUFFIXED(i) => { i.to_str() } LIT_FLOAT(s, t) => { - let mut body = *in.get(s); + let mut body = copy *in.get(s); if body.ends_with(~".") { body = body + ~"0"; // `10.f` is not a float literal } body + ast_util::float_ty_to_str(t) } LIT_FLOAT_UNSUFFIXED(s) => { - let mut body = *in.get(s); + let mut body = copy *in.get(s); if body.ends_with(~".") { body = body + ~"0"; // `10.f` is not a float literal } @@ -195,12 +195,12 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str { LIT_STR(s) => { ~"\"" + str::escape_default(*in.get(s)) + ~"\"" } /* Name components */ - IDENT(s, _) => *in.get(s), + IDENT(s, _) => copy *in.get(s), LIFETIME(s) => fmt!("'%s", *in.get(s)), UNDERSCORE => ~"_", /* Other */ - DOC_COMMENT(s) => *in.get(s), + DOC_COMMENT(s) => copy *in.get(s), EOF => ~"", INTERPOLATED(ref nt) => { match nt { @@ -476,7 +476,7 @@ pub fn temporary_keyword_table() -> HashMap<~str, ()> { ~"self", ~"static", ]; for keys.each |word| { - words.insert(*word, ()); + words.insert(copy *word, ()); } words } @@ -503,7 +503,7 @@ pub fn strict_keyword_table() -> HashMap<~str, ()> { ~"while" ]; for keys.each |word| { - words.insert(*word, ()); + words.insert(copy *word, ()); } words } @@ -514,7 +514,7 @@ pub fn reserved_keyword_table() -> HashMap<~str, ()> { ~"be" ]; for keys.each |word| { - words.insert(*word, ()); + words.insert(copy *word, ()); } words } From 5f1652f34fee38f3d88f5944c86f159c0f7d7fee Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Tue, 26 Feb 2013 07:43:53 -0800 Subject: [PATCH 27/30] libsyntax: remove vecs_implicitly_copyable from the printer --- src/libsyntax/print/pp.rs | 6 ++--- src/libsyntax/print/pprust.rs | 45 ++++++++++++++++++----------------- 2 files changed, 26 insertions(+), 25 deletions(-) diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 402c7c2663465..1dc45b1a4bd9a 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -568,15 +568,15 @@ pub fn end(p: @mut Printer) { p.pretty_print(END); } pub fn eof(p: @mut Printer) { p.pretty_print(EOF); } pub fn word(p: @mut Printer, wrd: ~str) { - p.pretty_print(STRING(@wrd, str::len(wrd) as int)); + p.pretty_print(STRING(@/*bad*/ copy wrd, wrd.len() as int)); } pub fn huge_word(p: @mut Printer, wrd: ~str) { - p.pretty_print(STRING(@wrd, size_infinity)); + p.pretty_print(STRING(@/*bad*/ copy wrd, size_infinity)); } pub fn zero_word(p: @mut Printer, wrd: ~str) { - 
p.pretty_print(STRING(@wrd, 0)); + p.pretty_print(STRING(@/*bad*/ copy wrd, 0)); } pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 7cf297b324bda..e98629a46a528 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -108,18 +108,20 @@ pub fn print_crate(cm: @CodeMap, intr: @ident_interner, span_diagnostic: diagnostic::span_handler, crate: @ast::crate, filename: ~str, in: io::Reader, out: io::Writer, ann: pp_ann, is_expanded: bool) { - let (cmnts, lits) = - comments::gather_comments_and_literals(span_diagnostic, - filename, in); + let (cmnts, lits) = comments::gather_comments_and_literals( + span_diagnostic, + copy filename, + in + ); let s = @ps { s: pp::mk_printer(out, default_columns), cm: Some(cm), intr: intr, - comments: Some(cmnts), + comments: Some(copy cmnts), // If the code is post expansion, don't use the table of // literals, since it doesn't correspond with the literals // in the AST anymore. - literals: if is_expanded { None } else { Some(lits) }, + literals: if is_expanded { None } else { Some(copy lits) }, cur_cmnt_and_lit: @mut CurrentCommentAndLiteral { cur_cmnt: 0, cur_lit: 0 @@ -378,7 +380,7 @@ pub fn print_type(s: @ps, &&ty: @ast::Ty) { pub fn print_type_ex(s: @ps, &&ty: @ast::Ty, print_colons: bool) { maybe_print_comment(s, ty.span.lo); ibox(s, 0u); - match ty.node { + match /*bad*/ copy ty.node { ast::ty_nil => word(s.s, ~"()"), ast::ty_bot => word(s.s, ~"!"), ast::ty_box(mt) => { word(s.s, ~"@"); print_mt(s, mt); } @@ -458,7 +460,7 @@ pub fn print_foreign_item(s: @ps, item: @ast::foreign_item) { hardbreak_if_not_bol(s); maybe_print_comment(s, item.span.lo); print_outer_attributes(s, item.attrs); - match item.node { + match /*bad*/ copy item.node { ast::foreign_item_fn(decl, purity, typarams) => { print_fn(s, decl, Some(purity), item.ident, typarams, None, ast::inherited); @@ -484,7 +486,7 @@ pub fn print_item(s: @ps, &&item: @ast::item) { print_outer_attributes(s, item.attrs); let ann_node = node_item(s, item); (s.ann.pre)(ann_node); - match item.node { + match /*bad*/ copy item.node { ast::item_const(ty, expr) => { head(s, visibility_qualified(item.vis, ~"const")); print_ident(s, item.ident); @@ -652,7 +654,7 @@ pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def, space(s.s); if newtype { word_space(s, ~"="); - match enum_definition.variants[0].node.kind { + match /*bad*/ copy enum_definition.variants[0].node.kind { ast::tuple_variant_kind(args) => print_type(s, args[0].ty), _ => fail!(~"newtype syntax with struct?") } @@ -690,9 +692,8 @@ pub fn visibility_to_str(vis: ast::visibility) -> ~str { pub fn visibility_qualified(vis: ast::visibility, s: ~str) -> ~str { match vis { - ast::private | ast::public => - visibility_to_str(vis) + " " + s, - ast::inherited => s + ast::private | ast::public => visibility_to_str(vis) + " " + s, + ast::inherited => copy s } } @@ -809,7 +810,7 @@ pub fn print_tts(s: @ps, &&tts: &[ast::token_tree]) { pub fn print_variant(s: @ps, v: ast::variant) { print_visibility(s, v.node.vis); - match v.node.kind { + match /*bad*/ copy v.node.kind { ast::tuple_variant_kind(args) => { print_ident(s, v.node.name); if !args.is_empty() { @@ -844,8 +845,8 @@ pub fn print_ty_method(s: @ps, m: &ast::ty_method) { maybe_print_comment(s, m.span.lo); print_outer_attributes(s, m.attrs); print_ty_fn(s, None, None, None, m.purity, ast::Many, - m.decl, Some(m.ident), Some(m.tps), - Some(m.self_ty.node)); + m.decl, Some(m.ident), 
Some(/*bad*/ copy m.tps), + Some(/*bad*/ copy m.self_ty.node)); word(s.s, ~";"); } @@ -1141,7 +1142,7 @@ pub fn print_expr(s: @ps, &&expr: @ast::expr) { ibox(s, indent_unit); let ann_node = node_expr(s, expr); (s.ann.pre)(ann_node); - match expr.node { + match /*bad*/ copy expr.node { ast::expr_vstore(e, v) => match v { ast::expr_vstore_fixed(_) => { print_expr(s, e); @@ -1490,7 +1491,7 @@ pub fn print_local_decl(s: @ps, loc: @ast::local) { pub fn print_decl(s: @ps, decl: @ast::decl) { maybe_print_comment(s, decl.span.lo); - match decl.node { + match /*bad*/ copy decl.node { ast::decl_local(locs) => { space_if_not_bol(s); ibox(s, indent_unit); @@ -1574,7 +1575,7 @@ pub fn print_pat(s: @ps, &&pat: @ast::pat, refutable: bool) { (s.ann.pre)(ann_node); /* Pat isn't normalized, but the beauty of it is that it doesn't matter */ - match pat.node { + match /*bad*/ copy pat.node { ast::pat_wild => word(s.s, ~"_"), ast::pat_ident(binding_mode, path, sub) => { if refutable { @@ -1886,7 +1887,7 @@ pub fn print_view_item(s: @ps, item: @ast::view_item) { maybe_print_comment(s, item.span.lo); print_outer_attributes(s, item.attrs); print_visibility(s, item.vis); - match item.node { + match /*bad*/ copy item.node { ast::view_item_extern_mod(id, mta, _) => { head(s, ~"extern mod"); print_ident(s, id); @@ -1968,7 +1969,7 @@ pub fn print_ty_fn(s: @ps, print_onceness(s, onceness); word(s.s, ~"fn"); match id { Some(id) => { word(s.s, ~" "); print_ident(s, id); } _ => () } - match tps { Some(tps) => print_type_params(s, tps), _ => () } + match /*bad*/ copy tps { Some(tps) => print_type_params(s, tps), _ => () } zerobreak(s.s); popen(s); @@ -2095,7 +2096,7 @@ pub fn next_lit(s: @ps, pos: BytePos) -> Option { match s.literals { Some(ref lits) => { while s.cur_cmnt_and_lit.cur_lit < vec::len((*lits)) { - let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit]; + let ltrl = /*bad*/ copy (*lits)[s.cur_cmnt_and_lit.cur_lit]; if ltrl.pos > pos { return None; } s.cur_cmnt_and_lit.cur_lit += 1u; if ltrl.pos == pos { return Some(ltrl); } @@ -2182,7 +2183,7 @@ pub fn next_comment(s: @ps) -> Option { match s.comments { Some(ref cmnts) => { if s.cur_cmnt_and_lit.cur_cmnt < vec::len((*cmnts)) { - return Some((*cmnts)[s.cur_cmnt_and_lit.cur_cmnt]); + return Some(copy cmnts[s.cur_cmnt_and_lit.cur_cmnt]); } else { return None::; } } _ => return None:: From 5271464cc0aae453e96ad70a33e0c9636df7f90e Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 24 Feb 2013 21:27:51 -0800 Subject: [PATCH 28/30] libsyntax: remove vecs_implicitly_copyable from the syntax extensions --- src/libsyntax/ext/auto_encode.rs | 34 +++++++------ src/libsyntax/ext/base.rs | 15 +++--- src/libsyntax/ext/build.rs | 68 ++++++++++++++------------ src/libsyntax/ext/deriving.rs | 4 +- src/libsyntax/ext/env.rs | 4 +- src/libsyntax/ext/expand.rs | 27 +++++----- src/libsyntax/ext/fmt.rs | 13 +++-- src/libsyntax/ext/pipes/ast_builder.rs | 16 +++--- src/libsyntax/ext/pipes/liveness.rs | 2 +- src/libsyntax/ext/pipes/mod.rs | 4 +- src/libsyntax/ext/pipes/parse_proto.rs | 10 ++-- src/libsyntax/ext/pipes/pipec.rs | 8 +-- src/libsyntax/ext/pipes/proto.rs | 8 +-- src/libsyntax/ext/quote.rs | 8 +-- src/libsyntax/ext/tt/macro_parser.rs | 13 ++--- src/libsyntax/ext/tt/macro_rules.rs | 6 +-- src/libsyntax/ext/tt/transcribe.rs | 35 +++++++------ 17 files changed, 151 insertions(+), 124 deletions(-) diff --git a/src/libsyntax/ext/auto_encode.rs b/src/libsyntax/ext/auto_encode.rs index 27e76a8ce7a3b..ea8678ed208dc 100644 --- a/src/libsyntax/ext/auto_encode.rs +++ 
b/src/libsyntax/ext/auto_encode.rs @@ -143,8 +143,8 @@ pub fn expand_auto_encode( cx, item.span, item.ident, - *enum_def, - *tps + copy *enum_def, + copy *tps ); ~[filter_attrs(*item), ser_impl] @@ -188,7 +188,7 @@ pub fn expand_auto_decode( item.span, item.ident, struct_def.fields, - *tps + copy *tps ); ~[filter_attrs(*item), deser_impl] @@ -198,8 +198,8 @@ pub fn expand_auto_decode( cx, item.span, item.ident, - *enum_def, - *tps + copy *enum_def, + copy *tps ); ~[filter_attrs(*item), deser_impl] @@ -346,7 +346,7 @@ priv impl ext_ctxt { fn lambda(+blk: ast::blk) -> @ast::expr { let ext_cx = self; - let blk_e = self.expr(blk.span, ast::expr_block(blk)); + let blk_e = self.expr(copy blk.span, ast::expr_block(copy blk)); quote_expr!( || $blk_e ) } @@ -840,14 +840,14 @@ fn mk_enum_ser_impl( cx: ext_ctxt, span: span, ident: ast::ident, - enum_def: ast::enum_def, + +enum_def: ast::enum_def, tps: ~[ast::ty_param] ) -> @ast::item { let body = mk_enum_ser_body( cx, span, ident, - enum_def.variants + copy enum_def.variants ); mk_ser_impl(cx, span, ident, tps, body) @@ -857,7 +857,7 @@ fn mk_enum_deser_impl( cx: ext_ctxt, span: span, ident: ast::ident, - enum_def: ast::enum_def, + +enum_def: ast::enum_def, tps: ~[ast::ty_param] ) -> @ast::item { let body = mk_enum_deser_body( @@ -960,8 +960,14 @@ fn mk_enum_ser_body( ) -> @ast::expr { let arms = do variants.mapi |v_idx, variant| { match variant.node.kind { - ast::tuple_variant_kind(args) => - ser_variant(cx, span, variant.node.name, v_idx, args), + ast::tuple_variant_kind(ref args) => + ser_variant( + cx, + span, + variant.node.name, + v_idx, + /*bad*/ copy *args + ), ast::struct_variant_kind(*) => fail!(~"struct variants unimplemented"), ast::enum_variant_kind(*) => @@ -1041,7 +1047,7 @@ fn mk_enum_deser_body( ) -> @ast::expr { let mut arms = do variants.mapi |v_idx, variant| { let body = match variant.node.kind { - ast::tuple_variant_kind(args) => { + ast::tuple_variant_kind(ref args) => { if args.is_empty() { // for a nullary variant v, do "v" ext_cx.expr_path(span, ~[variant.node.name]) @@ -1051,7 +1057,7 @@ fn mk_enum_deser_body( ext_cx, span, variant.node.name, - args + copy *args ) } }, @@ -1074,7 +1080,7 @@ fn mk_enum_deser_body( } }; - let quoted_expr = quote_expr!( + let quoted_expr = copy quote_expr!( ::core::sys::begin_unwind(~"explicit failure", ~"empty", 1); ).node; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index b7641e8b19b9a..9d597b539bbf2 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -192,7 +192,7 @@ pub trait ext_ctxt { } pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, - cfg: ast::crate_cfg) -> ext_ctxt { + +cfg: ast::crate_cfg) -> ext_ctxt { struct CtxtRepr { parse_sess: @mut parse::ParseSess, cfg: ast::crate_cfg, @@ -203,7 +203,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, impl ext_ctxt for CtxtRepr { fn codemap(@mut self) -> @CodeMap { self.parse_sess.cm } fn parse_sess(@mut self) -> @mut parse::ParseSess { self.parse_sess } - fn cfg(@mut self) -> ast::crate_cfg { self.cfg } + fn cfg(@mut self) -> ast::crate_cfg { copy self.cfg } fn call_site(@mut self) -> span { match *self.backtrace { Some(@ExpandedFrom(CallInfo {call_site: cs, _})) => cs, @@ -214,7 +214,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, fn backtrace(@mut self) -> Option<@ExpnInfo> { *self.backtrace } fn mod_push(@mut self, i: ast::ident) { self.mod_path.push(i); } fn mod_pop(@mut self) { self.mod_path.pop(); } - fn mod_path(@mut self) -> ~[ast::ident] { return self.mod_path; } + fn 
mod_path(@mut self) -> ~[ast::ident] { copy self.mod_path } fn bt_push(@mut self, ei: codemap::ExpnInfo) { match ei { ExpandedFrom(CallInfo {call_site: cs, callee: ref callee}) => { @@ -222,7 +222,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, Some(@ExpandedFrom(CallInfo { call_site: span {lo: cs.lo, hi: cs.hi, expn_info: *self.backtrace}, - callee: (*callee)})); + callee: copy *callee})); } } } @@ -269,12 +269,11 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, fn set_trace_macros(@mut self, x: bool) { self.trace_mac = x } - fn str_of(@mut self, id: ast::ident) -> ~str { - *self.parse_sess.interner.get(id) + copy *self.parse_sess.interner.get(id) } fn ident_of(@mut self, st: ~str) -> ast::ident { - self.parse_sess.interner.intern(@st) + self.parse_sess.interner.intern(@/*bad*/ copy st) } } let imp: @mut CtxtRepr = @mut CtxtRepr { @@ -290,7 +289,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str { match expr.node { ast::expr_lit(l) => match l.node { - ast::lit_str(s) => return *s, + ast::lit_str(s) => copy *s, _ => cx.span_fatal(l.span, err_msg) }, _ => cx.span_fatal(expr.span, err_msg) diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 55e5d5fbe17cc..13a29c54b97fb 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -26,7 +26,7 @@ pub struct Field { pub fn mk_expr(cx: ext_ctxt, sp: codemap::span, - expr: ast::expr_) + +expr: ast::expr_) -> @ast::expr { @ast::expr { id: cx.next_id(), @@ -62,7 +62,7 @@ pub fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr) cx.next_id(); // see ast_util::op_expr_callee_id mk_expr(cx, sp, ast::expr_unary(op, e)) } -pub fn mk_raw_path(sp: span, idents: ~[ast::ident]) -> @ast::path { +pub fn mk_raw_path(sp: span, +idents: ~[ast::ident]) -> @ast::path { let p = @ast::path { span: sp, global: false, idents: idents, @@ -71,7 +71,7 @@ pub fn mk_raw_path(sp: span, idents: ~[ast::ident]) -> @ast::path { return p; } pub fn mk_raw_path_(sp: span, - idents: ~[ast::ident], + +idents: ~[ast::ident], +types: ~[@ast::Ty]) -> @ast::path { @ast::path { span: sp, @@ -80,17 +80,17 @@ pub fn mk_raw_path_(sp: span, rp: None, types: types } } -pub fn mk_raw_path_global(sp: span, idents: ~[ast::ident]) -> @ast::path { +pub fn mk_raw_path_global(sp: span, +idents: ~[ast::ident]) -> @ast::path { @ast::path { span: sp, global: true, idents: idents, rp: None, types: ~[] } } -pub fn mk_path(cx: ext_ctxt, sp: span, idents: ~[ast::ident]) -> @ast::expr { +pub fn mk_path(cx: ext_ctxt, sp: span, +idents: ~[ast::ident]) -> @ast::expr { mk_expr(cx, sp, ast::expr_path(mk_raw_path(sp, idents))) } -pub fn mk_path_global(cx: ext_ctxt, sp: span, idents: ~[ast::ident]) +pub fn mk_path_global(cx: ext_ctxt, sp: span, +idents: ~[ast::ident]) -> @ast::expr { mk_expr(cx, sp, ast::expr_path(mk_raw_path_global(sp, idents))) } @@ -98,7 +98,7 @@ pub fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident) -> @ast::expr { mk_expr(cx, sp, ast::expr_field(p, m, ~[])) } -pub fn mk_access(cx: ext_ctxt, sp: span, p: ~[ast::ident], m: ast::ident) +pub fn mk_access(cx: ext_ctxt, sp: span, +p: ~[ast::ident], m: ast::ident) -> @ast::expr { let pathexpr = mk_path(cx, sp, p); return mk_access_(cx, sp, pathexpr, m); @@ -107,21 +107,21 @@ pub fn mk_addr_of(cx: ext_ctxt, sp: span, e: @ast::expr) -> @ast::expr { return mk_expr(cx, sp, ast::expr_addr_of(ast::m_imm, e)); } pub fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr, - args: ~[@ast::expr]) -> 
@ast::expr { + +args: ~[@ast::expr]) -> @ast::expr { mk_expr(cx, sp, ast::expr_call(fn_expr, args, ast::NoSugar)) } -pub fn mk_call(cx: ext_ctxt, sp: span, fn_path: ~[ast::ident], - args: ~[@ast::expr]) -> @ast::expr { +pub fn mk_call(cx: ext_ctxt, sp: span, +fn_path: ~[ast::ident], + +args: ~[@ast::expr]) -> @ast::expr { let pathexpr = mk_path(cx, sp, fn_path); return mk_call_(cx, sp, pathexpr, args); } -pub fn mk_call_global(cx: ext_ctxt, sp: span, fn_path: ~[ast::ident], - args: ~[@ast::expr]) -> @ast::expr { +pub fn mk_call_global(cx: ext_ctxt, sp: span, +fn_path: ~[ast::ident], + +args: ~[@ast::expr]) -> @ast::expr { let pathexpr = mk_path_global(cx, sp, fn_path); return mk_call_(cx, sp, pathexpr, args); } // e = expr, t = type -pub fn mk_base_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_base_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { let vecexpr = ast::expr_vec(exprs, ast::m_imm); mk_expr(cx, sp, vecexpr) @@ -131,25 +131,25 @@ pub fn mk_vstore_e(cx: ext_ctxt, sp: span, expr: @ast::expr, @ast::expr { mk_expr(cx, sp, ast::expr_vstore(expr, vst)) } -pub fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::expr_vstore_uniq) } -pub fn mk_slice_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_slice_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::expr_vstore_slice) } -pub fn mk_fixed_vec_e(cx: ext_ctxt, sp: span, exprs: ~[@ast::expr]) +pub fn mk_fixed_vec_e(cx: ext_ctxt, sp: span, +exprs: ~[@ast::expr]) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_vec_e(cx, sp, exprs), ast::expr_vstore_fixed(None)) } -pub fn mk_base_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr { +pub fn mk_base_str(cx: ext_ctxt, sp: span, +s: ~str) -> @ast::expr { let lit = ast::lit_str(@s); return mk_lit(cx, sp, lit); } -pub fn mk_uniq_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr { +pub fn mk_uniq_str(cx: ext_ctxt, sp: span, +s: ~str) -> @ast::expr { mk_vstore_e(cx, sp, mk_base_str(cx, sp, s), ast::expr_vstore_uniq) } pub fn mk_field(sp: span, f: &Field) -> ast::field { @@ -161,28 +161,36 @@ pub fn mk_field(sp: span, f: &Field) -> ast::field { pub fn mk_fields(sp: span, fields: ~[Field]) -> ~[ast::field] { fields.map(|f| mk_field(sp, f)) } -pub fn mk_rec_e(cx: ext_ctxt, sp: span, fields: ~[Field]) -> @ast::expr { +pub fn mk_rec_e(cx: ext_ctxt, + sp: span, + +fields: ~[Field]) + -> @ast::expr { mk_expr(cx, sp, ast::expr_rec(mk_fields(sp, fields), option::None::<@ast::expr>)) } -pub fn mk_struct_e(cx: ext_ctxt, sp: span, ctor_path: ~[ast::ident], - fields: ~[Field]) -> @ast::expr { +pub fn mk_struct_e(cx: ext_ctxt, + sp: span, + +ctor_path: ~[ast::ident], + +fields: ~[Field]) + -> @ast::expr { mk_expr(cx, sp, ast::expr_struct(mk_raw_path(sp, ctor_path), mk_fields(sp, fields), option::None::<@ast::expr>)) } -pub fn mk_global_struct_e(cx: ext_ctxt, sp: span, - ctor_path: ~[ast::ident], - fields: ~[Field]) +pub fn mk_global_struct_e(cx: ext_ctxt, + sp: span, + +ctor_path: ~[ast::ident], + +fields: ~[Field]) -> @ast::expr { mk_expr(cx, sp, ast::expr_struct(mk_raw_path_global(sp, ctor_path), mk_fields(sp, fields), option::None::<@ast::expr>)) } -pub fn mk_glob_use(cx: ext_ctxt, sp: span, path: ~[ast::ident]) - -> @ast::view_item { +pub fn mk_glob_use(cx: ext_ctxt, + sp: span, + +path: ~[ast::ident]) -> @ast::view_item { let glob = 
@codemap::spanned { node: ast::view_path_glob(mk_raw_path(sp, path), cx.next_id()), span: sp, @@ -218,8 +226,8 @@ pub fn mk_local(cx: ext_ctxt, sp: span, mutbl: bool, @codemap::spanned { node: ast::stmt_decl(@decl, cx.next_id()), span: sp } } pub fn mk_block(cx: ext_ctxt, span: span, - view_items: ~[@ast::view_item], - stmts: ~[@ast::stmt], + +view_items: ~[@ast::view_item], + +stmts: ~[@ast::stmt], expr: Option<@ast::expr>) -> @ast::expr { let blk = codemap::spanned { node: ast::blk_ { @@ -313,7 +321,7 @@ pub fn mk_stmt(cx: ext_ctxt, span: span, expr: @ast::expr) -> @ast::stmt { } pub fn mk_ty_path(cx: ext_ctxt, span: span, - idents: ~[ ast::ident ]) + +idents: ~[ ast::ident ]) -> @ast::Ty { let ty = build::mk_raw_path(span, idents); let ty = ast::ty_path(ty, cx.next_id()); @@ -322,7 +330,7 @@ pub fn mk_ty_path(cx: ext_ctxt, } pub fn mk_ty_path_global(cx: ext_ctxt, span: span, - idents: ~[ ast::ident ]) + +idents: ~[ ast::ident ]) -> @ast::Ty { let ty = build::mk_raw_path_global(span, idents); let ty = ast::ty_path(ty, cx.next_id()); diff --git a/src/libsyntax/ext/deriving.rs b/src/libsyntax/ext/deriving.rs index 4942558f8bc34..54fb6bd3bb6ea 100644 --- a/src/libsyntax/ext/deriving.rs +++ b/src/libsyntax/ext/deriving.rs @@ -472,8 +472,8 @@ fn call_substructure_iter_bytes_method(cx: ext_ctxt, fn variant_arg_count(cx: ext_ctxt, span: span, variant: &variant) -> uint { match variant.node.kind { - tuple_variant_kind(args) => args.len(), - struct_variant_kind(struct_def) => struct_def.fields.len(), + tuple_variant_kind(ref args) => args.len(), + struct_variant_kind(ref struct_def) => struct_def.fields.len(), enum_variant_kind(*) => { cx.span_bug(span, ~"variant_arg_count: enum variants deprecated") } diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index f546c718393b6..030e819ca3f1c 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -31,8 +31,8 @@ pub fn expand_syntax_ext(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) // Option rather than just an maybe-empty string. 
let e = match os::getenv(var) { - option::None => mk_uniq_str(cx, sp, ~""), - option::Some(ref s) => mk_uniq_str(cx, sp, (*s)) + None => mk_uniq_str(cx, sp, ~""), + Some(ref s) => mk_uniq_str(cx, sp, copy *s) }; MRExpr(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d26b566ecf67a..282506929ffde 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -52,7 +52,7 @@ pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt, cx.bt_push(ExpandedFrom(CallInfo { call_site: s, callee: NameAndSpan { - name: *extname, + name: copy *extname, span: exp_sp, }, })); @@ -72,7 +72,8 @@ pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt, }; //keep going, outside-in - let fully_expanded = fld.fold_expr(expanded).node; + let fully_expanded = + copy fld.fold_expr(expanded).node; cx.bt_pop(); (fully_expanded, s) @@ -169,7 +170,7 @@ pub fn expand_item_mac(exts: SyntaxExtensions, let (pth, tts) = match it.node { item_mac(codemap::spanned { node: mac_invoc_tt(pth, ref tts), _}) => { - (pth, (*tts)) + (pth, copy *tts) } _ => cx.span_bug(it.span, ~"invalid item macro invocation") }; @@ -189,8 +190,8 @@ pub fn expand_item_mac(exts: SyntaxExtensions, cx.bt_push(ExpandedFrom(CallInfo { call_site: it.span, callee: NameAndSpan { - name: *extname, - span: (*expand).span + name: copy *extname, + span: expand.span } })); ((*expand).expander)(cx, it.span, tts) @@ -204,8 +205,8 @@ pub fn expand_item_mac(exts: SyntaxExtensions, cx.bt_push(ExpandedFrom(CallInfo { call_site: it.span, callee: NameAndSpan { - name: *extname, - span: (*expand).span + name: copy *extname, + span: expand.span } })); ((*expand).expander)(cx, it.span, it.ident, tts) @@ -238,7 +239,9 @@ pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt, let (mac, pth, tts, semi) = match *s { stmt_mac(ref mac, semi) => { match mac.node { - mac_invoc_tt(pth, ref tts) => ((*mac), pth, (*tts), semi) + mac_invoc_tt(pth, ref tts) => { + (copy *mac, pth, copy *tts, semi) + } } } _ => return orig(s, sp, fld) @@ -254,7 +257,7 @@ pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt, SyntaxExpanderTT{expander: exp, span: exp_sp})) => { cx.bt_push(ExpandedFrom(CallInfo { call_site: sp, - callee: NameAndSpan { name: *extname, span: exp_sp } + callee: NameAndSpan { name: copy *extname, span: exp_sp } })); let expanded = match exp(cx, mac.span, tts) { MRExpr(e) => @@ -267,7 +270,7 @@ pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt, }; //keep going, outside-in - let fully_expanded = fld.fold_stmt(expanded).node; + let fully_expanded = copy fld.fold_stmt(expanded).node; cx.bt_pop(); (fully_expanded, sp) @@ -351,7 +354,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess, cfg: ast::crate_cfg, c: @crate) -> @crate { let exts = syntax_expander_table(); let afp = default_ast_fold(); - let cx: ext_ctxt = mk_ctxt(parse_sess, cfg); + let cx: ext_ctxt = mk_ctxt(parse_sess, copy cfg); let f_pre = @AstFoldFns { fold_expr: |a,b,c| expand_expr(exts, cx, a, b, c, afp.fold_expr), fold_mod: |a,b| expand_mod_items(exts, cx, a, b, afp.fold_mod), @@ -362,7 +365,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess, let f = make_fold(f_pre); let cm = parse_expr_from_source_str(~"", @core_macros(), - cfg, + copy cfg, parse_sess); // This is run for its side-effects on the expander env, diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index d15b228365ce5..2169e8f6f2ffb 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -276,9 +276,9 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, for 
pieces.each |pc| { match *pc { PieceString(ref s) => { - piece_exprs.push(mk_uniq_str(cx, fmt_sp, (*s))) + piece_exprs.push(mk_uniq_str(cx, fmt_sp, copy *s)) } - PieceConv(conv) => { + PieceConv(ref conv) => { n += 1u; if n >= nargs { cx.span_fatal(sp, @@ -286,9 +286,14 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, ~"for the given format string"); } debug!("Building conversion:"); - log_conv(conv); + log_conv(/*bad*/ copy *conv); let arg_expr = args[n]; - let c_expr = make_new_conv(cx, fmt_sp, conv, arg_expr); + let c_expr = make_new_conv( + cx, + fmt_sp, + /*bad*/ copy *conv, + arg_expr + ); piece_exprs.push(c_expr); } } diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index ed167a8e2a754..7917a072414a6 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -58,14 +58,14 @@ pub impl append_types for @ast::path { fn add_ty(&self, ty: @ast::Ty) -> @ast::path { @ast::path { types: vec::append_one(copy self.types, ty), - .. **self + .. copy **self } } fn add_tys(&self, +tys: ~[@ast::Ty]) -> @ast::path { @ast::path { types: vec::append(copy self.types, tys), - .. **self + .. copy **self } } } @@ -97,12 +97,12 @@ pub trait ext_ctxt_ast_builder { +enum_definition: ast::enum_def) -> @ast::item; fn item_struct_poly(&self, name: ident, span: span, - struct_def: ast::struct_def, + +struct_def: ast::struct_def, +ty_params: ~[ast::ty_param]) -> @ast::item; fn item_struct(&self, name: ident, span: span, - struct_def: ast::struct_def) -> @ast::item; + +struct_def: ast::struct_def) -> @ast::item; fn struct_expr(&self, path: @ast::path, - fields: ~[ast::field]) -> @ast::expr; + +fields: ~[ast::field]) -> @ast::expr; fn variant(&self, name: ident, span: span, +tys: ~[@ast::Ty]) -> ast::variant; fn item_mod(&self, name: ident, span: span, @@ -284,18 +284,18 @@ pub impl ext_ctxt_ast_builder for ext_ctxt { } fn item_struct(&self, name: ident, span: span, - struct_def: ast::struct_def) -> @ast::item { + +struct_def: ast::struct_def) -> @ast::item { self.item_struct_poly(name, span, struct_def, ~[]) } fn item_struct_poly(&self, name: ident, span: span, - struct_def: ast::struct_def, + +struct_def: ast::struct_def, +ty_params: ~[ast::ty_param]) -> @ast::item { self.item(name, span, ast::item_struct(@struct_def, ty_params)) } fn struct_expr(&self, path: @ast::path, - fields: ~[ast::field]) -> @ast::expr { + +fields: ~[ast::field]) -> @ast::expr { @ast::expr { id: self.next_id(), callee_id: self.next_id(), diff --git a/src/libsyntax/ext/pipes/liveness.rs b/src/libsyntax/ext/pipes/liveness.rs index a7f01d7564859..c5bed32a24f2a 100644 --- a/src/libsyntax/ext/pipes/liveness.rs +++ b/src/libsyntax/ext/pipes/liveness.rs @@ -88,7 +88,7 @@ pub fn analyze(proto: protocol, _cx: ext_ctxt) { } if self_live.len() > 0 { - let states = str::connect(self_live.map(|s| s.name), ~" "); + let states = str::connect(self_live.map(|s| copy s.name), ~" "); debug!("protocol %s is unbounded due to loops involving: %s", proto.name, states); diff --git a/src/libsyntax/ext/pipes/mod.rs b/src/libsyntax/ext/pipes/mod.rs index 8b8e48bd5229b..c26bfd178b024 100644 --- a/src/libsyntax/ext/pipes/mod.rs +++ b/src/libsyntax/ext/pipes/mod.rs @@ -65,11 +65,11 @@ pub mod liveness; pub fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident, - tt: ~[ast::token_tree]) -> base::MacResult { + tt: ~[ast::token_tree]) -> base::MacResult { let sess = cx.parse_sess(); let cfg = cx.cfg(); let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic, - 
cx.parse_sess().interner, None, tt); + cx.parse_sess().interner, None, copy tt); let rdr = tt_rdr as reader; let rust_parser = Parser(sess, cfg, rdr.dup()); diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index a5d2a1783d63f..ce253f6156b5f 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -18,13 +18,13 @@ use parse::token; use core::prelude::*; pub trait proto_parser { - fn parse_proto(&self, id: ~str) -> protocol; + fn parse_proto(&self, +id: ~str) -> protocol; fn parse_state(&self, proto: protocol); fn parse_message(&self, state: state); } pub impl proto_parser for parser::Parser { - fn parse_proto(&self, id: ~str) -> protocol { + fn parse_proto(&self, +id: ~str) -> protocol { let proto = protocol(id, *self.span); self.parse_seq_to_before_end( @@ -41,7 +41,7 @@ pub impl proto_parser for parser::Parser { fn parse_state(&self, proto: protocol) { let id = self.parse_ident(); - let name = *self.interner.get(id); + let name = copy *self.interner.get(id); self.expect(&token::COLON); let dir = match copy *self.token { @@ -76,7 +76,7 @@ pub impl proto_parser for parser::Parser { } fn parse_message(&self, state: state) { - let mname = *self.interner.get(self.parse_ident()); + let mname = copy *self.interner.get(self.parse_ident()); let args = if *self.token == token::LPAREN { self.parse_unspanned_seq( @@ -95,7 +95,7 @@ pub impl proto_parser for parser::Parser { let next = match *self.token { token::IDENT(_, _) => { - let name = *self.interner.get(self.parse_ident()); + let name = copy *self.interner.get(self.parse_ident()); let ntys = if *self.token == token::LT { self.parse_unspanned_seq( &token::LT, diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 16de2a20668cf..be25876cfbf30 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -118,7 +118,7 @@ pub impl gen_send for message { let mut rty = cx.ty_path_ast_builder(path(~[next.data_name()], span) - .add_tys(next_state.tys)); + .add_tys(copy next_state.tys)); if try { rty = cx.ty_option(rty); } @@ -152,7 +152,7 @@ pub impl gen_send for message { ~"" } else { - ~"(" + str::connect(arg_names.map(|x| *x), + ~"(" + str::connect(arg_names.map(|x| copy *x), ~", ") + ~")" }; @@ -209,7 +209,7 @@ pub impl to_type_decls for state { let mut items_msg = ~[]; for self.messages.each |m| { - let message(name, span, tys, this, next) = *m; + let message(name, span, tys, this, next) = copy *m; let tys = match next { Some(ref next_state) => { @@ -225,7 +225,7 @@ pub impl to_type_decls for state { cx.ty_path_ast_builder( path(~[cx.ident_of(dir), cx.ident_of(next_name)], span) - .add_tys(next_state.tys))) + .add_tys(copy next_state.tys))) } None => tys }; diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index 831b1b79ff86c..52eb88d0700e8 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -50,7 +50,7 @@ pub struct message(~str, span, ~[@ast::Ty], state, Option); pub impl message { fn name(&mut self) -> ~str { match *self { - message(ref id, _, _, _, _) => (*id) + message(ref id, _, _, _, _) => copy *id } } @@ -63,7 +63,7 @@ pub impl message { /// Return the type parameters actually used by this message fn get_params(&mut self) -> ~[ast::ty_param] { match *self { - message(_, _, _, this, _) => this.ty_params + message(_, _, _, this, _) => copy this.ty_params } } } @@ -82,8 +82,8 @@ pub struct state_ { } pub impl state_ { - fn 
add_message(@self, name: ~str, span: span, - +data: ~[@ast::Ty], next: Option) { + fn add_message(@self, +name: ~str, span: span, + +data: ~[@ast::Ty], +next: Option) { self.messages.push(message(name, span, data, self, next)); } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 6d1ab58444693..534953b4c8adf 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -262,10 +262,10 @@ pub fn expand_quote_stmt(cx: ext_ctxt, } fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] { - strs.map(|str| cx.parse_sess().interner.intern(@*str)) + strs.map(|str| cx.parse_sess().interner.intern(@copy *str)) } -fn id_ext(cx: ext_ctxt, str: ~str) -> ast::ident { +fn id_ext(cx: ext_ctxt, +str: ~str) -> ast::ident { cx.parse_sess().interner.intern(@str) } @@ -580,8 +580,8 @@ fn expand_tts(cx: ext_ctxt, fn expand_parse_call(cx: ext_ctxt, sp: span, - parse_method: ~str, - arg_exprs: ~[@ast::expr], + +parse_method: ~str, + +arg_exprs: ~[@ast::expr], tts: &[ast::token_tree]) -> @ast::expr { let tts_expr = expand_tts(cx, sp, tts); diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 82e7de08d6533..86c4cbee04b6c 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -138,10 +138,10 @@ pub fn count_names(ms: &[matcher]) -> uint { } #[allow(non_implicitly_copyable_typarams)] -pub fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: BytePos) +pub fn initial_matcher_pos(+ms: ~[matcher], sep: Option, lo: BytePos) -> ~MatcherPos { let mut match_idx_hi = 0u; - for ms.each() |elt| { + for ms.each |elt| { match elt.node { match_tok(_) => (), match_seq(_,_,_,_,hi) => { @@ -152,12 +152,13 @@ pub fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: BytePos) } } } + let matches = vec::from_fn(count_names(ms), |_i| dvec::DVec()); ~MatcherPos { elts: ms, sep: sep, idx: 0u, up: matcher_pos_up(None), - matches: copy vec::from_fn(count_names(ms), |_i| dvec::DVec()), + matches: matches, match_lo: 0u, match_hi: match_idx_hi, sp_lo: lo @@ -238,7 +239,7 @@ pub fn parse(sess: @mut ParseSess, ms: ~[matcher]) -> parse_result { let mut cur_eis = ~[]; - cur_eis.push(initial_matcher_pos(ms, None, rdr.peek().sp.lo)); + cur_eis.push(initial_matcher_pos(copy ms, None, rdr.peek().sp.lo)); loop { let mut bb_eis = ~[]; // black-box parsed by parser.rs @@ -329,8 +330,8 @@ pub fn parse(sess: @mut ParseSess, |_m| DVec::<@named_match>()); let ei_t = ei; cur_eis.push(~MatcherPos { - elts: (*matchers), - sep: (*sep), + elts: copy *matchers, + sep: copy *sep, idx: 0u, up: matcher_pos_up(Some(ei_t)), matches: matches, diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index cfb6e45cb5bb9..5c20abc385d3e 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -54,7 +54,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, // Parse the macro_rules! 
invocation (`none` is for no interpolations):
     let arg_reader = new_tt_reader(copy cx.parse_sess().span_diagnostic,
-                                   cx.parse_sess().interner, None, arg);
+                                   cx.parse_sess().interner, None, copy arg);
     let argument_map = parse_or_else(cx.parse_sess(), cx.cfg(),
                                      arg_reader as reader, argument_gram);
@@ -130,7 +130,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
       }
       failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
         best_fail_spot = sp;
-        best_fail_msg = (*msg);
+        best_fail_msg = copy *msg;
       },
       error(sp, ref msg) => cx.span_fatal(sp, (*msg))
     }
@@ -145,7 +145,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
         |cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses);
 
     return MRDef(MacroDef{
-        name: *cx.parse_sess().interner.get(name),
+        name: copy *cx.parse_sess().interner.get(name),
         ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)})
     });
 }
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index a9502ff29020e..4aa6236bf5a8a 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -54,7 +54,7 @@ pub struct TtReader {
 pub fn new_tt_reader(sp_diag: span_handler,
                      itr: @ident_interner,
                      interp: Option>,
-                     src: ~[ast::token_tree])
+                     +src: ~[ast::token_tree])
                   -> @mut TtReader {
     let r = @mut TtReader {
         sp_diag: sp_diag,
@@ -101,7 +101,7 @@ pub pure fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader {
         interpolations: r.interpolations,
         repeat_idx: copy r.repeat_idx,
         repeat_len: copy r.repeat_len,
-        cur_tok: r.cur_tok,
+        cur_tok: copy r.cur_tok,
         cur_span: r.cur_span
     }
 }
@@ -115,7 +115,7 @@ pure fn lookup_cur_matched_by_matched(r: @mut TtReader,
             // end of the line; duplicate henceforth
             ad
           }
-          matched_seq(ads, _) => ads[*idx]
+          matched_seq(ref ads, _) => ads[*idx]
         }
     }
     vec::foldl(start, r.repeat_idx, red)
@@ -131,15 +131,15 @@ enum lis {
 fn lockstep_iter_size(t: token_tree, r: @mut TtReader) -> lis {
     fn lis_merge(lhs: lis, rhs: lis, r: @mut TtReader) -> lis {
         match lhs {
-          lis_unconstrained => rhs,
-          lis_contradiction(_) => lhs,
+          lis_unconstrained => copy rhs,
+          lis_contradiction(_) => copy lhs,
           lis_constraint(l_len, l_id) => match rhs {
-            lis_unconstrained => lhs,
-            lis_contradiction(_) => rhs,
-            lis_constraint(r_len, _) if l_len == r_len => lhs,
+            lis_unconstrained => copy lhs,
+            lis_contradiction(_) => copy rhs,
+            lis_constraint(r_len, _) if l_len == r_len => copy lhs,
             lis_constraint(r_len, r_id) => {
-                let l_n = *r.interner.get(l_id);
-                let r_n = *r.interner.get(r_id);
+                let l_n = copy *r.interner.get(l_id);
+                let r_n = copy *r.interner.get(r_id);
                 lis_contradiction(fmt!("Inconsistent lockstep iteration: \
                                         '%s' has %u items, but '%s' has %u",
                                        l_n, l_len, r_n, r_len))
@@ -155,14 +155,17 @@ fn lockstep_iter_size(t: token_tree, r: @mut TtReader) -> lis {
       tt_tok(*) => lis_unconstrained,
       tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) {
         matched_nonterminal(_) => lis_unconstrained,
-        matched_seq(ads, _) => lis_constraint(ads.len(), name)
+        matched_seq(ref ads, _) => lis_constraint(ads.len(), name)
       }
     }
 }
 
 pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
-    let ret_val = TokenAndSpan { tok: r.cur_tok, sp: r.cur_span };
+    let ret_val = TokenAndSpan {
+        tok: copy r.cur_tok,
+        sp: r.cur_span,
+    };
     while r.cur.idx >= r.cur.readme.len() {
         /* done with this set; pop or repeat? */
         if ! r.cur.dotdotdoted
@@ -210,12 +213,13 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
         // if this could be 0-length, we'd need to potentially recur here
       }
       tt_tok(sp, copy tok) => {
-        r.cur_span = sp; r.cur_tok = tok;
+        r.cur_span = sp;
+        r.cur_tok = tok;
         r.cur.idx += 1u;
         return ret_val;
       }
       tt_seq(sp, copy tts, copy sep, zerok) => {
-        match lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) {
+        match lockstep_iter_size(tt_seq(sp, copy tts, sep, zerok), r) {
           lis_unconstrained => {
             r.sp_diag.span_fatal(
                 sp, /* blame macro writer */
@@ -264,7 +268,8 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
                 return ret_val;
               }
               matched_nonterminal(ref other_whole_nt) => {
-                r.cur_span = sp; r.cur_tok = INTERPOLATED((*other_whole_nt));
+                r.cur_span = sp;
+                r.cur_tok = INTERPOLATED(copy *other_whole_nt);
                 r.cur.idx += 1u;
                 return ret_val;
               }

From da7aedc2306a7ea2efee0864e8bdf6dbf53d73a4 Mon Sep 17 00:00:00 2001
From: Erick Tryzelaar
Date: Sun, 24 Feb 2013 16:56:49 -0800
Subject: [PATCH 29/30] libsyntax: add some explicit refs

---
 src/libsyntax/attr.rs |  9 ++++-----
 src/libsyntax/fold.rs | 12 ++++++------
 2 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 5b2d6f7600b54..e744d19b96544 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -300,7 +300,7 @@ pub fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: &str) ->
 pub fn find_linkage_metas(attrs: &[ast::attribute]) -> ~[@ast::meta_item] {
     do find_attrs_by_name(attrs, ~"link").flat_map |attr| {
         match attr.node.value.node {
-            ast::meta_list(_, items) => /* FIXME (#2543) */ copy items,
+            ast::meta_list(_, ref items) => /* FIXME (#2543) */ copy *items,
             _ => ~[]
         }
     }
@@ -347,11 +347,10 @@ pub fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr {
     do vec::foldl(ia_none, attrs) |ia,attr| {
         match attr.node.value.node {
           ast::meta_word(@~"inline") => ia_hint,
-          ast::meta_list(@~"inline", items) => {
-            if !vec::is_empty(find_meta_items_by_name(items, ~"always")) {
+          ast::meta_list(@~"inline", ref items) => {
+            if !find_meta_items_by_name(*items, ~"always").is_empty() {
                 ia_always
-            } else if !vec::is_empty(
-                find_meta_items_by_name(items, ~"never")) {
+            } else if !find_meta_items_by_name(*items, ~"never").is_empty() {
                 ia_never
             } else {
                 ia_hint
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index d67596e100fa0..68d05e11171c5 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -372,7 +372,7 @@ fn noop_fold_arm(a: &arm, fld: ast_fold) -> arm {
 pub fn noop_fold_pat(p: &pat_, fld: ast_fold) -> pat_ {
     match *p {
         pat_wild => pat_wild,
-        pat_ident(binding_mode, pth, sub) => {
+        pat_ident(binding_mode, pth, ref sub) => {
             pat_ident(
                 binding_mode,
                 fld.fold_path(pth),
@@ -380,13 +380,13 @@ pub fn noop_fold_pat(p: &pat_, fld: ast_fold) -> pat_ {
             )
         }
         pat_lit(e) => pat_lit(fld.fold_expr(e)),
-        pat_enum(pth, pats) => {
+        pat_enum(pth, ref pats) => {
             pat_enum(
                 fld.fold_path(pth),
                 pats.map(|pats| pats.map(|x| fld.fold_pat(*x)))
             )
         }
-        pat_rec(fields, etc) => {
+        pat_rec(ref fields, etc) => {
             let fs = do fields.map |f| {
                 ast::field_pat {
                     ident: /* FIXME (#2543) */ copy f.ident,
@@ -395,7 +395,7 @@ pub fn noop_fold_pat(p: &pat_, fld: ast_fold) -> pat_ {
             };
             pat_rec(fs, etc)
         }
-        pat_struct(pth, fields, etc) => {
+        pat_struct(pth, ref fields, etc) => {
             let pth_ = fld.fold_path(pth);
             let fs = do fields.map |f| {
                 ast::field_pat {
@@ -412,7 +412,7 @@ pub fn noop_fold_pat(p: &pat_, fld: ast_fold) -> pat_ {
         pat_range(e1, e2) => {
             pat_range(fld.fold_expr(e1), fld.fold_expr(e2))
         },
-        pat_vec(elts, tail) => {
+        pat_vec(ref elts, ref tail) => {
             pat_vec(
                 elts.map(|x| fld.fold_pat(*x)),
                 tail.map(|tail| fld.fold_pat(*tail))
@@ -423,7 +423,7 @@ fn noop_fold_decl(d: &decl_, fld: ast_fold) -> decl_ {
     match *d {
-        decl_local(ls) => decl_local(ls.map(|x| fld.fold_local(*x))),
+        decl_local(ref ls) => decl_local(ls.map(|x| fld.fold_local(*x))),
         decl_item(it) => {
             match fld.fold_item(it) {
                 Some(it_folded) => decl_item(it_folded),

From ea36a0dee1630e24ba2889ca13550026b1af4f9d Mon Sep 17 00:00:00 2001
From: Erick Tryzelaar
Date: Tue, 26 Feb 2013 20:18:01 -0800
Subject: [PATCH 30/30] libsyntax: add some more explicit copies

---
 src/libsyntax/ext/deriving.rs        |   2 +-
 src/libsyntax/ext/trace_macros.rs    |   6 +-
 src/libsyntax/ext/tt/macro_parser.rs |  21 ++-
 src/libsyntax/ext/tt/transcribe.rs   |   4 +-
 src/libsyntax/parse/mod.rs           | 216 +++++++++++++++++----------
 src/libsyntax/parse/parser.rs        |   2 +-
 6 files changed, 160 insertions(+), 91 deletions(-)

diff --git a/src/libsyntax/ext/deriving.rs b/src/libsyntax/ext/deriving.rs
index 54fb6bd3bb6ea..81d2aee46a614 100644
--- a/src/libsyntax/ext/deriving.rs
+++ b/src/libsyntax/ext/deriving.rs
@@ -838,7 +838,7 @@ fn expand_deriving_eq_struct_tuple_method(cx: ext_ctxt,
     let self_str = ~"self";
     let other_str = ~"__other";
     let type_path = build::mk_raw_path(span, ~[type_ident]);
-    let fields = struct_def.fields;
+    let fields = copy struct_def.fields;
 
     // Create comparison expression, comparing each of the fields
     let mut match_body = None;
diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs
index fb7b41be2d670..bb6d656d5cc12 100644
--- a/src/libsyntax/ext/trace_macros.rs
+++ b/src/libsyntax/ext/trace_macros.rs
@@ -29,7 +29,11 @@ pub fn expand_trace_macros(cx: ext_ctxt, sp: span,
         vec::from_slice(tt)
     );
     let rdr = tt_rdr as reader;
-    let rust_parser = Parser(sess, cfg, rdr.dup());
+    let rust_parser = Parser(
+        sess,
+        copy cfg,
+        rdr.dup()
+    );
 
     if rust_parser.is_keyword(&~"true") {
         cx.set_trace_macros(true);
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 86c4cbee04b6c..419c051ea9791 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -224,8 +224,12 @@ pub enum parse_result {
     error(codemap::span, ~str)
 }
 
-pub fn parse_or_else(sess: @mut ParseSess, cfg: ast::crate_cfg, rdr: reader,
-                     ms: ~[matcher]) -> HashMap {
+pub fn parse_or_else(
+    sess: @mut ParseSess,
+    +cfg: ast::crate_cfg,
+    rdr: reader,
+    ms: ~[matcher]
+) -> HashMap {
     match parse(sess, cfg, rdr, ms) {
         success(m) => m,
         failure(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)),
@@ -233,11 +237,12 @@ pub fn parse_or_else(sess: @mut ParseSess, cfg: ast::crate_cfg, rdr: reader,
     }
 }
 
-pub fn parse(sess: @mut ParseSess,
-             cfg: ast::crate_cfg,
-             rdr: reader,
-             ms: ~[matcher])
-          -> parse_result {
+pub fn parse(
+    sess: @mut ParseSess,
+    cfg: ast::crate_cfg,
+    rdr: reader,
+    ms: ~[matcher]
+) -> parse_result {
     let mut cur_eis = ~[];
     cur_eis.push(initial_matcher_pos(copy ms, None, rdr.peek().sp.lo));
 
@@ -387,7 +392,7 @@ pub fn parse(sess: @mut ParseSess,
             }
             rdr.next_token();
         } else /* bb_eis.len() == 1 */ {
-            let rust_parser = Parser(sess, cfg, rdr.dup());
+            let rust_parser = Parser(sess, copy cfg, rdr.dup());
 
             let mut ei = bb_eis.pop();
             match ei.elts[ei.idx].node {
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 4aa6236bf5a8a..99afd7958e998 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -202,9 +202,9 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
     loop { /* because it's easiest, this handles `tt_delim`
               not starting with a `tt_tok`, even though it won't happen */
         match r.cur.readme[r.cur.idx] {
-          tt_delim(tts) => {
+          tt_delim(copy tts) => {
             r.cur = @mut TtFrame {
-                readme: @mut copy tts,
+                readme: @mut tts,
                 idx: 0u,
                 dotdotdoted: false,
                 sep: None,
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index cf5cb847018ac..d8c3ca06d76e5 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -81,78 +81,116 @@ pub fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
 // this appears to be the main entry point for rust parsing by
 // rustc and crate:
-pub fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
-                             sess: @mut ParseSess) -> @ast::crate {
-    let p = new_parser_from_file(sess, cfg, input);
-    p.parse_crate_mod(cfg)
+pub fn parse_crate_from_file(
+    input: &Path,
+    cfg: ast::crate_cfg,
+    sess: @mut ParseSess
+) -> @ast::crate {
+    let p = new_parser_from_file(sess, /*bad*/ copy cfg, input);
+    p.parse_crate_mod(/*bad*/ copy cfg)
     // why is there no p.abort_if_errors here?
 }
 
-pub fn parse_crate_from_source_str(name: ~str,
-                                   source: @~str,
-                                   cfg: ast::crate_cfg,
-                                   sess: @mut ParseSess) -> @ast::crate {
-    let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name,
-                                       codemap::FssNone, source);
-    let r = p.parse_crate_mod(cfg);
+pub fn parse_crate_from_source_str(
+    name: ~str,
+    source: @~str,
+    cfg: ast::crate_cfg,
+    sess: @mut ParseSess
+) -> @ast::crate {
+    let p = new_parser_from_source_str(
+        sess,
+        /*bad*/ copy cfg,
+        /*bad*/ copy name,
+        codemap::FssNone,
+        source
+    );
+    let r = p.parse_crate_mod(/*bad*/ copy cfg);
     p.abort_if_errors();
-    return r;
+    r
 }
 
-pub fn parse_expr_from_source_str(name: ~str,
-                                  source: @~str,
-                                  cfg: ast::crate_cfg,
-                                  sess: @mut ParseSess) -> @ast::expr {
-    let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name,
-                                       codemap::FssNone, source);
+pub fn parse_expr_from_source_str(
+    name: ~str,
+    source: @~str,
+    +cfg: ast::crate_cfg,
+    sess: @mut ParseSess
+) -> @ast::expr {
+    let p = new_parser_from_source_str(
+        sess,
+        cfg,
+        /*bad*/ copy name,
+        codemap::FssNone,
+        source
+    );
     let r = p.parse_expr();
     p.abort_if_errors();
-    return r;
+    r
 }
 
-pub fn parse_item_from_source_str(name: ~str,
-                                  source: @~str,
-                                  cfg: ast::crate_cfg,
-                                  +attrs: ~[ast::attribute],
-                                  sess: @mut ParseSess)
-                               -> Option<@ast::item> {
-    let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name,
-                                       codemap::FssNone, source);
+pub fn parse_item_from_source_str(
+    name: ~str,
+    source: @~str,
+    +cfg: ast::crate_cfg,
+    +attrs: ~[ast::attribute],
+    sess: @mut ParseSess
+) -> Option<@ast::item> {
+    let p = new_parser_from_source_str(
+        sess,
+        cfg,
+        /*bad*/ copy name,
+        codemap::FssNone,
+        source
+    );
     let r = p.parse_item(attrs);
     p.abort_if_errors();
-    return r;
+    r
 }
 
-pub fn parse_stmt_from_source_str(name: ~str,
-                                  source: @~str,
-                                  cfg: ast::crate_cfg,
-                                  +attrs: ~[ast::attribute],
-                                  sess: @mut ParseSess) -> @ast::stmt {
-    let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name,
-                                       codemap::FssNone, source);
+pub fn parse_stmt_from_source_str(
+    name: ~str,
+    source: @~str,
+    +cfg: ast::crate_cfg,
+    +attrs: ~[ast::attribute],
+    sess: @mut ParseSess
+) -> @ast::stmt {
+    let p = new_parser_from_source_str(
+        sess,
+        cfg,
+        /*bad*/ copy name,
+        codemap::FssNone,
+        source
+    );
     let r = p.parse_stmt(attrs);
     p.abort_if_errors();
-    return r;
+    r
 }
 
-pub fn parse_tts_from_source_str(name: ~str,
-                                 source: @~str,
-                                 cfg: ast::crate_cfg,
-                                 sess: @mut ParseSess) -> ~[ast::token_tree] {
-    let p = new_parser_from_source_str(sess, cfg, /*bad*/ copy name,
-                                       codemap::FssNone, source);
+pub fn parse_tts_from_source_str(
+    name: ~str,
+    source: @~str,
+    +cfg: ast::crate_cfg,
+    sess: @mut ParseSess
+) -> ~[ast::token_tree] {
+    let p = new_parser_from_source_str(
+        sess,
+        cfg,
+        /*bad*/ copy name,
+        codemap::FssNone,
+        source
+    );
     *p.quote_depth += 1u;
     let r = p.parse_all_token_trees();
     p.abort_if_errors();
-    return r;
+    r
 }
 
-pub fn parse_from_source_str(f: fn (p: Parser) -> T,
-                             name: ~str, ss: codemap::FileSubstr,
-                             source: @~str, cfg: ast::crate_cfg,
-                             sess: @mut ParseSess)
-    -> T
-{
+pub fn parse_from_source_str(
+    f: fn (Parser) -> T,
+    name: ~str, ss: codemap::FileSubstr,
+    source: @~str,
+    +cfg: ast::crate_cfg,
+    sess: @mut ParseSess
+) -> T {
     let p = new_parser_from_source_str(
         sess,
         cfg,
@@ -176,40 +214,51 @@ pub fn next_node_id(sess: @mut ParseSess) -> node_id {
     return rv;
 }
 
-pub fn new_parser_from_source_str(sess: @mut ParseSess, cfg: ast::crate_cfg,
-                                  +name: ~str, +ss: codemap::FileSubstr,
-                                  source: @~str) -> Parser {
+pub fn new_parser_from_source_str(
+    sess: @mut ParseSess,
+    +cfg: ast::crate_cfg,
+    +name: ~str,
+    +ss: codemap::FileSubstr,
+    source: @~str
+) -> Parser {
     let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
-    let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
-                                        filemap,
-                                        sess.interner);
-    return Parser(sess, cfg, srdr as reader);
+    let srdr = lexer::new_string_reader(
+        copy sess.span_diagnostic,
+        filemap,
+        sess.interner
+    );
+    Parser(sess, cfg, srdr as reader)
 }
 
 // Read the entire source file, return a parser
 // that draws from that string
-pub fn new_parser_result_from_file(sess: @mut ParseSess,
-                                   cfg: ast::crate_cfg,
-                                   path: &Path)
-                                -> Result {
+pub fn new_parser_result_from_file(
+    sess: @mut ParseSess,
+    +cfg: ast::crate_cfg,
+    path: &Path
+) -> Result {
     match io::read_whole_file_str(path) {
-        result::Ok(src) => {
-
-            let filemap = sess.cm.new_filemap(path.to_str(), @src);
-            let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
-                                                filemap,
-                                                sess.interner);
-            Ok(Parser(sess, cfg, srdr as reader))
+        Ok(src) => {
+            let filemap = sess.cm.new_filemap(path.to_str(), @src);
+            let srdr = lexer::new_string_reader(
+                copy sess.span_diagnostic,
+                filemap,
+                sess.interner
+            );
+            Ok(Parser(sess, cfg, srdr as reader))
 
-        }
-        result::Err(e) => Err(e)
+        }
+        Err(e) => Err(e)
     }
 }
 
 /// Create a new parser for an entire crate, handling errors as appropriate
 /// if the file doesn't exist
-pub fn new_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg,
-                            path: &Path) -> Parser {
+pub fn new_parser_from_file(
+    sess: @mut ParseSess,
+    +cfg: ast::crate_cfg,
+    path: &Path
+) -> Parser {
     match new_parser_result_from_file(sess, cfg, path) {
         Ok(parser) => parser,
         Err(e) => {
@@ -220,8 +269,12 @@ pub fn new_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg,
 }
 
 /// Create a new parser based on a span from an existing parser. Handles
 /// error messages correctly when the file does not exist.
-pub fn new_sub_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg,
-                                path: &Path, sp: span) -> Parser {
+pub fn new_sub_parser_from_file(
+    sess: @mut ParseSess,
+    +cfg: ast::crate_cfg,
+    path: &Path,
+    sp: span
+) -> Parser {
     match new_parser_result_from_file(sess, cfg, path) {
         Ok(parser) => parser,
         Err(e) => {
@@ -230,11 +283,18 @@ pub fn new_sub_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg,
     }
 }
 
-pub fn new_parser_from_tts(sess: @mut ParseSess, cfg: ast::crate_cfg,
-                           +tts: ~[ast::token_tree]) -> Parser {
-    let trdr = lexer::new_tt_reader(copy sess.span_diagnostic, sess.interner,
-                                    None, tts);
-    return Parser(sess, cfg, trdr as reader)
+pub fn new_parser_from_tts(
+    sess: @mut ParseSess,
+    +cfg: ast::crate_cfg,
+    +tts: ~[ast::token_tree]
+) -> Parser {
+    let trdr = lexer::new_tt_reader(
+        copy sess.span_diagnostic,
+        sess.interner,
+        None,
+        tts
+    );
+    Parser(sess, cfg, trdr as reader)
 }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 2c7947090c243..dffa04ac1cab0 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -3492,7 +3492,7 @@ pub impl Parser {
         };
         let full_path = full_path.normalize();
         let p0 =
-            new_sub_parser_from_file(self.sess, self.cfg,
+            new_sub_parser_from_file(self.sess, copy self.cfg,
                                      &full_path, id_sp);
         let (inner, next) = p0.parse_inner_attrs_and_next();
         let mod_attrs = vec::append(