Rollup of 5 pull requests #132211

Closed
wants to merge 12 commits
Changes from all commits
98 changes: 43 additions & 55 deletions compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -3,12 +3,11 @@ use std::collections::hash_map::Entry;
use std::{mem, slice};

use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::token::NtPatKind::*;
use rustc_ast::token::TokenKind::*;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{DUMMY_NODE_ID, NodeId};
use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, TransparencyError};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
@@ -370,34 +369,32 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
pub fn compile_declarative_macro(
sess: &Session,
features: &Features,
def: &ast::Item,
macro_def: &ast::MacroDef,
ident: Ident,
attrs: &[ast::Attribute],
span: Span,
node_id: NodeId,
edition: Edition,
) -> (SyntaxExtension, Vec<(usize, Span)>) {
debug!("compile_declarative_macro: {:?}", def);
let mk_syn_ext = |expander| {
SyntaxExtension::new(
sess,
features,
SyntaxExtensionKind::LegacyBang(expander),
def.span,
span,
Vec::new(),
edition,
def.ident.name,
&def.attrs,
def.id != DUMMY_NODE_ID,
ident.name,
attrs,
node_id != DUMMY_NODE_ID,
)
};
let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new());

let dcx = sess.dcx();
let lhs_nm = Ident::new(sym::lhs, def.span);
let rhs_nm = Ident::new(sym::rhs, def.span);
let lhs_nm = Ident::new(sym::lhs, span);
let rhs_nm = Ident::new(sym::rhs, span);
let tt_spec = Some(NonterminalKind::TT);

let macro_def = match &def.kind {
ast::ItemKind::MacroDef(def) => def,
_ => unreachable!(),
};
let macro_rules = macro_def.macro_rules;

// Parse the macro_rules! invocation
@@ -410,25 +407,22 @@ pub fn compile_declarative_macro(
let argument_gram = vec![
mbe::TokenTree::Sequence(DelimSpan::dummy(), mbe::SequenceRepetition {
tts: vec![
mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
mbe::TokenTree::token(token::FatArrow, def.span),
mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
mbe::TokenTree::MetaVarDecl(span, lhs_nm, tt_spec),
mbe::TokenTree::token(token::FatArrow, span),
mbe::TokenTree::MetaVarDecl(span, rhs_nm, tt_spec),
],
separator: Some(Token::new(
if macro_rules { token::Semi } else { token::Comma },
def.span,
)),
kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
separator: Some(Token::new(if macro_rules { token::Semi } else { token::Comma }, span)),
kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, span),
num_captures: 2,
}),
// to phase into semicolon-termination instead of semicolon-separation
mbe::TokenTree::Sequence(DelimSpan::dummy(), mbe::SequenceRepetition {
tts: vec![mbe::TokenTree::token(
if macro_rules { token::Semi } else { token::Comma },
def.span,
span,
)],
separator: None,
kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, span),
num_captures: 0,
}),
];
@@ -460,15 +454,15 @@ pub fn compile_declarative_macro(
};

let s = parse_failure_msg(&token, track.get_expected_token());
let sp = token.span.substitute_dummy(def.span);
let sp = token.span.substitute_dummy(span);
let mut err = sess.dcx().struct_span_err(sp, s);
err.span_label(sp, msg);
annotate_doc_comment(&mut err, sess.source_map(), sp);
let guar = err.emit();
return dummy_syn_ext(guar);
}
Error(sp, msg) => {
let guar = sess.dcx().span_err(sp.substitute_dummy(def.span), msg);
let guar = sess.dcx().span_err(sp.substitute_dummy(span), msg);
return dummy_syn_ext(guar);
}
ErrorReported(guar) => {
@@ -489,21 +483,21 @@ pub fn compile_declarative_macro(
&TokenStream::new(vec![tt.clone()]),
true,
sess,
def.id,
node_id,
features,
edition,
)
.pop()
.unwrap();
// We don't handle errors here, the driver will abort
// after parsing/expansion. We can report every error in every macro this way.
check_emission(check_lhs_nt_follows(sess, def, &tt));
check_emission(check_lhs_nt_follows(sess, node_id, &tt));
return tt;
}
sess.dcx().span_bug(def.span, "wrong-structured lhs")
sess.dcx().span_bug(span, "wrong-structured lhs")
})
.collect::<Vec<mbe::TokenTree>>(),
_ => sess.dcx().span_bug(def.span, "wrong-structured lhs"),
_ => sess.dcx().span_bug(span, "wrong-structured lhs"),
};

let rhses = match &argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
@@ -515,17 +509,17 @@ pub fn compile_declarative_macro(
&TokenStream::new(vec![tt.clone()]),
false,
sess,
def.id,
node_id,
features,
edition,
)
.pop()
.unwrap();
}
sess.dcx().span_bug(def.span, "wrong-structured rhs")
sess.dcx().span_bug(span, "wrong-structured rhs")
})
.collect::<Vec<mbe::TokenTree>>(),
_ => sess.dcx().span_bug(def.span, "wrong-structured rhs"),
_ => sess.dcx().span_bug(span, "wrong-structured rhs"),
};

for rhs in &rhses {
@@ -537,15 +531,9 @@ pub fn compile_declarative_macro(
check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(lhs)));
}

check_emission(macro_check::check_meta_variables(
&sess.psess,
def.id,
def.span,
&lhses,
&rhses,
));
check_emission(macro_check::check_meta_variables(&sess.psess, node_id, span, &lhses, &rhses));

let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
let (transparency, transparency_error) = attr::find_transparency(attrs, macro_rules);
match transparency_error {
Some(TransparencyError::UnknownTransparency(value, span)) => {
dcx.span_err(span, format!("unknown macro transparency: `{value}`"));
@@ -564,7 +552,7 @@ pub fn compile_declarative_macro(

// Compute the spans of the macro rules for unused rule linting.
// Also, we are only interested in non-foreign macros.
let rule_spans = if def.id != DUMMY_NODE_ID {
let rule_spans = if node_id != DUMMY_NODE_ID {
lhses
.iter()
.zip(rhses.iter())
@@ -590,15 +578,15 @@ pub fn compile_declarative_macro(
mbe::TokenTree::Delimited(.., delimited) => {
mbe::macro_parser::compute_locs(&delimited.tts)
}
_ => sess.dcx().span_bug(def.span, "malformed macro lhs"),
_ => sess.dcx().span_bug(span, "malformed macro lhs"),
}
})
.collect();

let expander = Box::new(MacroRulesMacroExpander {
name: def.ident,
span: def.span,
node_id: def.id,
name: ident,
span,
node_id,
transparency,
lhses,
rhses,
@@ -608,13 +596,13 @@ pub fn compile_declarative_macro(

fn check_lhs_nt_follows(
sess: &Session,
def: &ast::Item,
node_id: NodeId,
lhs: &mbe::TokenTree,
) -> Result<(), ErrorGuaranteed> {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
if let mbe::TokenTree::Delimited(.., delimited) = lhs {
check_matcher(sess, def, &delimited.tts)
check_matcher(sess, node_id, &delimited.tts)
} else {
let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
Err(sess.dcx().span_err(lhs.span(), msg))
@@ -686,12 +674,12 @@ fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed

fn check_matcher(
sess: &Session,
def: &ast::Item,
node_id: NodeId,
matcher: &[mbe::TokenTree],
) -> Result<(), ErrorGuaranteed> {
let first_sets = FirstSets::new(matcher);
let empty_suffix = TokenSet::empty();
check_matcher_core(sess, def, &first_sets, matcher, &empty_suffix)?;
check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
Ok(())
}

@@ -1028,7 +1016,7 @@ impl<'tt> TokenSet<'tt> {
// see `FirstSets::new`.
fn check_matcher_core<'tt>(
sess: &Session,
def: &ast::Item,
node_id: NodeId,
first_sets: &FirstSets<'tt>,
matcher: &'tt [mbe::TokenTree],
follow: &TokenSet<'tt>,
@@ -1082,7 +1070,7 @@ fn check_matcher_core<'tt>(
token::CloseDelim(d.delim),
span.close,
));
check_matcher_core(sess, def, first_sets, &d.tts, &my_suffix)?;
check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
// don't track non NT tokens
last.replace_with_irrelevant();

@@ -1114,7 +1102,7 @@ fn check_matcher_core<'tt>(
// At this point, `suffix_first` is built, and
// `my_suffix` is some TokenSet that we can use
// for checking the interior of `seq_rep`.
let next = check_matcher_core(sess, def, first_sets, &seq_rep.tts, my_suffix)?;
let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
if next.maybe_empty {
last.add_all(&next);
} else {
@@ -1144,7 +1132,7 @@ fn check_matcher_core<'tt>(
// macro. (See #86567.)
// Macros defined in the current crate have a real node id,
// whereas macros from an external crate have a dummy id.
if def.id != DUMMY_NODE_ID
if node_id != DUMMY_NODE_ID
&& matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
&& matches!(
next_token,
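Note on this file's change as a whole: `compile_declarative_macro` no longer takes the full `ast::Item` and destructures it internally; callers now pass the fields it actually uses (`macro_def`, `ident`, `attrs`, `span`, `node_id`). The sketch below illustrates only the calling-convention change, with toy stand-in types rather than the real rustc_ast/rustc_span ones:

```rust
// Sketch only: `Span`, `Ident`, `Attribute`, `MacroDef`, `Item` are toy
// stand-ins for the rustc types; the point is the change in calling
// convention, not the function bodies.
#[derive(Clone, Copy)]
struct Span(u32);
struct Ident(String);
struct Attribute(String);
struct MacroDef { body: String }

struct Item {
    ident: Ident,
    attrs: Vec<Attribute>,
    span: Span,
    id: u32,
    def: MacroDef,
}

// Before: the callee receives the whole item and picks fields out of it,
// which forces every caller to have (or invent) a full `Item`.
fn compile_old(item: &Item) -> String {
    format!("{} @ {}", item.ident.0, item.span.0)
}

// After: the caller passes exactly what is needed; a caller that only has
// decoded metadata (no `Item`) can call this directly.
fn compile_new(def: &MacroDef, ident: &Ident, attrs: &[Attribute], span: Span, node_id: u32) -> String {
    format!(
        "{} @ {} (node {}, {} attrs, {} bytes of rules)",
        ident.0, span.0, node_id, attrs.len(), def.body.len()
    )
}

fn main() {
    let item = Item {
        ident: Ident("my_macro".into()),
        attrs: Vec::new(),
        span: Span(42),
        id: 7,
        def: MacroDef { body: "() => {};".into() },
    };
    println!("{}", compile_old(&item));
    println!("{}", compile_new(&item.def, &item.ident, &item.attrs, item.span, item.id));
}
```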
4 changes: 2 additions & 2 deletions compiler/rustc_metadata/src/creader.rs
@@ -28,7 +28,7 @@ use rustc_session::lint::{self, BuiltinLintDiag};
use rustc_session::output::validate_crate_name;
use rustc_session::search_paths::PathKind;
use rustc_span::edition::Edition;
use rustc_span::symbol::{Symbol, sym};
use rustc_span::symbol::{Ident, Symbol, sym};
use rustc_span::{DUMMY_SP, Span};
use rustc_target::spec::{PanicStrategy, Target, TargetTriple};
use tracing::{debug, info, trace};
@@ -97,7 +97,7 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> {
}

pub enum LoadedMacro {
MacroDef(ast::Item, Edition),
MacroDef { def: MacroDef, ident: Ident, attrs: AttrVec, span: Span, edition: Edition },
ProcMacro(SyntaxExtension),
}

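The key change here is that `LoadedMacro::MacroDef` goes from a tuple variant carrying a whole `ast::Item` to a struct variant naming exactly the fields the resolver consumes. A rough, self-contained illustration of that shape, using placeholder types rather than the actual rustc_metadata definitions:

```rust
// Placeholder types; only the enum shape mirrors the diff.
struct MacroDef { body: String }
struct Ident(String);
#[derive(Clone, Copy)]
struct Span(u32);
#[derive(Clone, Copy)]
enum Edition { E2015, E2021 }

enum LoadedMacro {
    // Was: MacroDef(ast::Item, Edition) — the Item existed only to bundle
    // these fields together. Now each field is spelled out.
    MacroDef { def: MacroDef, ident: Ident, attrs: Vec<String>, span: Span, edition: Edition },
    ProcMacro(String), // stands in for SyntaxExtension
}

// Consumer side (mirrors build_reduced_graph.rs): destructure the struct
// variant and forward the pieces.
fn consume(loaded: LoadedMacro) -> String {
    match loaded {
        LoadedMacro::MacroDef { def, ident, attrs, span, edition: _ } => {
            format!("decl macro {} ({} attrs, span {}): {}", ident.0, attrs.len(), span.0, def.body)
        }
        LoadedMacro::ProcMacro(ext) => format!("proc macro: {ext}"),
    }
}

fn main() {
    let m = LoadedMacro::MacroDef {
        def: MacroDef { body: "() => {};".into() },
        ident: Ident("vec_like".into()),
        attrs: vec![],
        span: Span(0),
        edition: Edition::E2021,
    };
    println!("{}", consume(m));
}
```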
28 changes: 8 additions & 20 deletions compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs
@@ -1,7 +1,6 @@
use std::any::Any;
use std::mem;

use rustc_ast as ast;
use rustc_attr::Deprecation;
use rustc_data_structures::sync::Lrc;
use rustc_hir::def::{CtorKind, DefKind, Res};
@@ -592,27 +591,16 @@ impl CStore {

let data = self.get_crate_data(id.krate);
if data.root.is_proc_macro_crate() {
return LoadedMacro::ProcMacro(data.load_proc_macro(id.index, tcx));
}

let span = data.get_span(id.index, sess);

LoadedMacro::MacroDef(
ast::Item {
LoadedMacro::ProcMacro(data.load_proc_macro(id.index, tcx))
} else {
LoadedMacro::MacroDef {
def: data.get_macro(id.index, sess),
ident: data.item_ident(id.index, sess),
id: ast::DUMMY_NODE_ID,
span,
attrs: data.get_item_attrs(id.index, sess).collect(),
kind: ast::ItemKind::MacroDef(data.get_macro(id.index, sess)),
vis: ast::Visibility {
span: span.shrink_to_lo(),
kind: ast::VisibilityKind::Inherited,
tokens: None,
},
tokens: None,
},
data.root.edition,
)
span: data.get_span(id.index, sess),
edition: data.root.edition,
}
}
}

pub fn def_span_untracked(&self, def_id: DefId, sess: &Session) -> Span {
4 changes: 3 additions & 1 deletion compiler/rustc_resolve/src/build_reduced_graph.rs
@@ -177,7 +177,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {

let loaded_macro = self.cstore().load_macro_untracked(def_id, self.tcx);
let macro_data = match loaded_macro {
LoadedMacro::MacroDef(item, edition) => self.compile_macro(&item, edition),
LoadedMacro::MacroDef { def, ident, attrs, span, edition } => {
self.compile_macro(&def, ident, &attrs, span, ast::DUMMY_NODE_ID, edition)
}
LoadedMacro::ProcMacro(ext) => MacroData::new(Lrc::new(ext)),
};

6 changes: 4 additions & 2 deletions compiler/rustc_resolve/src/def_collector.rs
@@ -199,8 +199,10 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> {
},
ItemKind::Const(..) => DefKind::Const,
ItemKind::Fn(..) | ItemKind::Delegation(..) => DefKind::Fn,
ItemKind::MacroDef(..) => {
let macro_data = self.resolver.compile_macro(i, self.resolver.tcx.sess.edition());
ItemKind::MacroDef(def) => {
let edition = self.resolver.tcx.sess.edition();
let macro_data =
self.resolver.compile_macro(def, i.ident, &i.attrs, i.span, i.id, edition);
let macro_kind = macro_data.ext.macro_kind();
opt_macro_data = Some(macro_data);
DefKind::Macro(macro_kind)
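Taken together, the two call sites keep the local-vs-foreign distinction in the node id: `def_collector.rs` passes the item's real `NodeId` for macros defined in the current crate, while `build_reduced_graph.rs` passes `ast::DUMMY_NODE_ID` for macros loaded from metadata, and the checks in `macro_rules.rs` gate lint-only work on that sentinel. A small sketch of the convention (the sentinel value and types here are illustrative, not the real rustc_ast ones):

```rust
// Illustrative only: the real DUMMY_NODE_ID lives in rustc_ast and NodeId is
// a newtype there; this just shows how a sentinel id gates local-only work.
type NodeId = u32;
const DUMMY_NODE_ID: NodeId = u32::MAX;

/// Spans of each macro rule, kept only for local macros so the
/// unused-rule lint has something to point at.
fn rule_spans(node_id: NodeId, spans: Vec<(usize, u32)>) -> Vec<(usize, u32)> {
    if node_id != DUMMY_NODE_ID {
        spans // defined in the current crate: keep for linting
    } else {
        Vec::new() // decoded from another crate: nothing to lint here
    }
}

fn main() {
    // Local definition (def_collector.rs path): real id, spans kept.
    assert_eq!(rule_spans(17, vec![(0, 100), (1, 200)]).len(), 2);
    // External definition (build_reduced_graph.rs path): dummy id, spans dropped.
    assert!(rule_spans(DUMMY_NODE_ID, vec![(0, 100)]).is_empty());
    println!("ok");
}
```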