Take &mut DocContext in passes
This should hopefully allow for less interior mutability.
jyn514 committed Feb 17, 2021
1 parent 8fe989d commit 6da9e3c
Showing 14 changed files with 49 additions and 83 deletions.
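The change is easiest to see as a pattern. Below is a minimal sketch, assuming stand-in names (`SharedDocContext`, `MutDocContext`, `visited`) rather than the real `DocContext` fields: when a pass receives `&mut DocContext` instead of `&DocContext`, the context can expose plain fields that the pass mutates directly, instead of fields wrapped in `Cell`/`RefCell` purely for interior mutability.

```rust
// Hypothetical, simplified illustration -- not rustdoc's actual code.
use std::cell::RefCell;

struct Crate;

// Before: passes only get `&DocContext`, so any state they need to update
// must sit behind an interior-mutability wrapper such as `RefCell`.
struct SharedDocContext {
    visited: RefCell<Vec<String>>,
}

fn pass_with_shared_ref(krate: Crate, cx: &SharedDocContext) -> Crate {
    // Mutation goes through a runtime-checked borrow.
    cx.visited.borrow_mut().push("some::item".to_string());
    krate
}

// After: passes get `&mut DocContext`, so the field can be a plain `Vec`
// and exclusive access is enforced at compile time instead.
struct MutDocContext {
    visited: Vec<String>,
}

fn pass_with_mut_ref(krate: Crate, cx: &mut MutDocContext) -> Crate {
    cx.visited.push("some::item".to_string());
    krate
}

fn main() {
    let shared = SharedDocContext { visited: RefCell::new(Vec::new()) };
    let _ = pass_with_shared_ref(Crate, &shared);

    let mut owned = MutDocContext { visited: Vec::new() };
    let _ = pass_with_mut_ref(Crate, &mut owned);
    assert_eq!(owned.visited.len(), 1);
}
```

That is what the diff does mechanically: every `Pass::run` signature becomes `fn(clean::Crate, &mut DocContext<'_>) -> clean::Crate`, and the `fn new` constructors that only existed to hand a shared reference to each collector struct are replaced by struct literals at the call sites.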
2 changes: 1 addition & 1 deletion src/librustdoc/core.rs
@@ -627,7 +627,7 @@ crate fn run_global_ctxt(
};
if run {
debug!("running pass {}", p.pass.name);
-krate = ctxt.tcx.sess.time(p.pass.name, || (p.pass.run)(krate, &ctxt));
+krate = ctxt.tcx.sess.time(p.pass.name, || (p.pass.run)(krate, &mut ctxt));
}
}

10 changes: 3 additions & 7 deletions src/librustdoc/passes/calculate_doc_coverage.rs
@@ -20,8 +20,8 @@ crate const CALCULATE_DOC_COVERAGE: Pass = Pass {
description: "counts the number of items with and without documentation",
};

-fn calculate_doc_coverage(krate: clean::Crate, ctx: &DocContext<'_>) -> clean::Crate {
-let mut calc = CoverageCalculator::new(ctx);
+fn calculate_doc_coverage(krate: clean::Crate, ctx: &mut DocContext<'_>) -> clean::Crate {
+let mut calc = CoverageCalculator { items: Default::default(), ctx };
let krate = calc.fold_crate(krate);

calc.print_results();
@@ -101,7 +101,7 @@ impl ops::AddAssign for ItemCount {

struct CoverageCalculator<'a, 'b> {
items: BTreeMap<FileName, ItemCount>,
-ctx: &'a DocContext<'b>,
+ctx: &'a mut DocContext<'b>,
}

fn limit_filename_len(filename: String) -> String {
@@ -115,10 +115,6 @@ fn limit_filename_len(filename: String) -> String {
}

impl<'a, 'b> CoverageCalculator<'a, 'b> {
-fn new(ctx: &'a DocContext<'b>) -> CoverageCalculator<'a, 'b> {
-CoverageCalculator { items: Default::default(), ctx }
-}

fn to_json(&self) -> String {
serde_json::to_string(
&self
2 changes: 1 addition & 1 deletion src/librustdoc/passes/check_code_block_syntax.rs
@@ -17,7 +17,7 @@ crate const CHECK_CODE_BLOCK_SYNTAX: Pass = Pass {
description: "validates syntax inside Rust code blocks",
};

-crate fn check_code_block_syntax(krate: clean::Crate, cx: &DocContext<'_>) -> clean::Crate {
+crate fn check_code_block_syntax(krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate {
SyntaxChecker { cx }.fold_crate(krate)
}

59 changes: 26 additions & 33 deletions src/librustdoc/passes/collect_intra_doc_links.rs
@@ -47,8 +47,13 @@ crate const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
description: "resolves intra-doc links",
};

-crate fn collect_intra_doc_links(krate: Crate, cx: &DocContext<'_>) -> Crate {
-LinkCollector::new(cx).fold_crate(krate)
+crate fn collect_intra_doc_links(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+LinkCollector {
+cx,
+mod_ids: Vec::new(),
+kind_side_channel: Cell::new(None),
+visited_links: FxHashMap::default(),
+}.fold_crate(krate)
}

/// Top-level errors emitted by this pass.
@@ -257,7 +262,7 @@ struct CachedLink {
}

struct LinkCollector<'a, 'tcx> {
-cx: &'a DocContext<'tcx>,
+cx: &'a mut DocContext<'tcx>,
/// A stack of modules used to decide what scope to resolve in.
///
/// The last module will be used if the parent scope of the current item is
@@ -273,15 +278,6 @@ struct LinkCollector<'a, 'tcx> {
}

impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
-fn new(cx: &'a DocContext<'tcx>) -> Self {
-LinkCollector {
-cx,
-mod_ids: Vec::new(),
-kind_side_channel: Cell::new(None),
-visited_links: FxHashMap::default(),
-}
-}

/// Given a full link, parse it as an [enum struct variant].
///
/// In particular, this will return an error whenever there aren't three
@@ -293,7 +289,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
path_str: &'path str,
module_id: DefId,
) -> Result<(Res, Option<String>), ErrorKind<'path>> {
-let cx = self.cx;
+let tcx = self.cx.tcx;
let no_res = || ResolutionFailure::NotResolved {
module_id,
partial_res: None,
@@ -317,7 +313,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// If there's no third component, we saw `[a::b]` before and it failed to resolve.
// So there's no partial res.
.ok_or_else(no_res)?;
-let ty_res = cx
+let ty_res = self.cx
.enter_resolver(|resolver| {
resolver.resolve_str_path_error(DUMMY_SP, &path, TypeNS, module_id)
})
@@ -326,18 +322,17 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {

match ty_res {
Res::Def(DefKind::Enum, did) => {
-if cx
-.tcx
+if tcx
.inherent_impls(did)
.iter()
-.flat_map(|imp| cx.tcx.associated_items(*imp).in_definition_order())
+.flat_map(|imp| tcx.associated_items(*imp).in_definition_order())
.any(|item| item.ident.name == variant_name)
{
// This is just to let `fold_item` know that this shouldn't be considered;
// it's a bug for the error to make it to the user
return Err(ResolutionFailure::Dummy.into());
}
-match cx.tcx.type_of(did).kind() {
+match tcx.type_of(did).kind() {
ty::Adt(def, _) if def.is_enum() => {
if def.all_fields().any(|item| item.ident.name == variant_field_name) {
Ok((
@@ -380,16 +375,16 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
item_name: Symbol,
item_str: &'path str,
) -> Result<(Res, Option<String>), ErrorKind<'path>> {
-let cx = self.cx;
+let tcx = self.cx.tcx;

prim_ty
-.impls(cx.tcx)
+.impls(tcx)
.into_iter()
.find_map(|&impl_| {
-cx.tcx
+tcx
.associated_items(impl_)
.find_by_name_and_namespace(
-cx.tcx,
+tcx,
Ident::with_dummy_span(item_name),
ns,
impl_,
@@ -434,9 +429,8 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
path_str: &'a str,
module_id: DefId,
) -> Result<Res, ResolutionFailure<'a>> {
-let cx = self.cx;
let path = ast::Path::from_ident(Ident::from_str(path_str));
-cx.enter_resolver(|resolver| {
+self.cx.enter_resolver(|resolver| {
// FIXME(jynelson): does this really need 3 separate lookups?
if let Ok((Some(ext), res)) = resolver.resolve_macro_path(
&path,
@@ -498,7 +492,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
module_id: DefId,
extra_fragment: &Option<String>,
) -> Result<(Res, Option<String>), ErrorKind<'path>> {
-let cx = self.cx;
+let cx = &self.cx;

if let Some(res) = self.resolve_path(path_str, ns, module_id) {
match res {
@@ -948,12 +942,11 @@ impl LinkCollector<'_, '_> {
return None;
}

-let cx = self.cx;
let link = ori_link.link.replace("`", "");
let parts = link.split('#').collect::<Vec<_>>();
let (link, extra_fragment) = if parts.len() > 2 {
// A valid link can't have multiple #'s
-anchor_failure(cx, &item, &link, dox, ori_link.range, AnchorFailure::MultipleAnchors);
+anchor_failure(self.cx, &item, &link, dox, ori_link.range, AnchorFailure::MultipleAnchors);
return None;
} else if parts.len() == 2 {
if parts[0].trim().is_empty() {
@@ -1105,7 +1098,7 @@ impl LinkCollector<'_, '_> {
if matches!(disambiguator, Some(Disambiguator::Primitive)) {
if fragment.is_some() {
anchor_failure(
-cx,
+self.cx,
&item,
path_str,
dox,
@@ -1119,7 +1112,7 @@ impl LinkCollector<'_, '_> {
} else {
// `[char]` when a `char` module is in scope
let candidates = vec![res, prim];
-ambiguity_error(cx, &item, path_str, dox, ori_link.range, candidates);
+ambiguity_error(self.cx, &item, path_str, dox, ori_link.range, candidates);
return None;
}
}
@@ -1140,7 +1133,7 @@ impl LinkCollector<'_, '_> {
suggest_disambiguator(resolved, diag, path_str, dox, sp, &ori_link.range);
};
report_diagnostic(
-cx,
+self.cx,
BROKEN_INTRA_DOC_LINKS,
&msg,
&item,
@@ -1187,7 +1180,7 @@ impl LinkCollector<'_, '_> {
if self.cx.tcx.privacy_access_levels(LOCAL_CRATE).is_exported(hir_src)
&& !self.cx.tcx.privacy_access_levels(LOCAL_CRATE).is_exported(hir_dst)
{
-privacy_error(cx, &item, &path_str, dox, &ori_link);
+privacy_error(self.cx, &item, &path_str, dox, &ori_link);
}
}

@@ -1211,7 +1204,7 @@ impl LinkCollector<'_, '_> {
&& !self.cx.tcx.features().intra_doc_pointers
{
let span = super::source_span_for_markdown_range(
-cx,
+self.cx,
dox,
&ori_link.range,
&item.attrs,
Expand Down Expand Up @@ -1243,7 +1236,7 @@ impl LinkCollector<'_, '_> {
}
Res::Def(kind, id) => {
verify(kind, id)?;
-let id = clean::register_res(cx, rustc_hir::def::Res::Def(kind, id));
+let id = clean::register_res(self.cx, rustc_hir::def::Res::Def(kind, id));
Some(ItemLink { link: ori_link.link, link_text, did: Some(id), fragment })
}
}
10 changes: 2 additions & 8 deletions src/librustdoc/passes/collect_trait_impls.rs
@@ -14,8 +14,8 @@ crate const COLLECT_TRAIT_IMPLS: Pass = Pass {
description: "retrieves trait impls for items in the crate",
};

-crate fn collect_trait_impls(krate: Crate, cx: &DocContext<'_>) -> Crate {
-let mut synth = SyntheticImplCollector::new(cx);
+crate fn collect_trait_impls(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+let mut synth = SyntheticImplCollector { cx, impls: Vec::new() };
let mut krate = cx.sess().time("collect_synthetic_impls", || synth.fold_crate(krate));

let prims: FxHashSet<PrimitiveType> = krate.primitives.iter().map(|p| p.1).collect();
@@ -164,12 +164,6 @@ struct SyntheticImplCollector<'a, 'tcx> {
impls: Vec<Item>,
}

-impl<'a, 'tcx> SyntheticImplCollector<'a, 'tcx> {
-fn new(cx: &'a DocContext<'tcx>) -> Self {
-SyntheticImplCollector { cx, impls: Vec::new() }
-}
-}

impl<'a, 'tcx> DocFolder for SyntheticImplCollector<'a, 'tcx> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
if i.is_struct() || i.is_enum() || i.is_union() {
15 changes: 4 additions & 11 deletions src/librustdoc/passes/doc_test_lints.rs
@@ -19,27 +19,20 @@ crate const CHECK_PRIVATE_ITEMS_DOC_TESTS: Pass = Pass {
};

struct PrivateItemDocTestLinter<'a, 'tcx> {
-cx: &'a DocContext<'tcx>,
+cx: &'a mut DocContext<'tcx>,
}

-impl<'a, 'tcx> PrivateItemDocTestLinter<'a, 'tcx> {
-fn new(cx: &'a DocContext<'tcx>) -> Self {
-PrivateItemDocTestLinter { cx }
-}
-}

-crate fn check_private_items_doc_tests(krate: Crate, cx: &DocContext<'_>) -> Crate {
-let mut coll = PrivateItemDocTestLinter::new(cx);
+crate fn check_private_items_doc_tests(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+let mut coll = PrivateItemDocTestLinter { cx };

coll.fold_crate(krate)
}

impl<'a, 'tcx> DocFolder for PrivateItemDocTestLinter<'a, 'tcx> {
fn fold_item(&mut self, item: Item) -> Option<Item> {
-let cx = self.cx;
let dox = item.attrs.collapsed_doc_value().unwrap_or_else(String::new);

-look_for_tests(&cx, &dox, &item);
+look_for_tests(self.cx, &dox, &item);

Some(self.fold_item_recur(item))
}
12 changes: 3 additions & 9 deletions src/librustdoc/passes/html_tags.rs
@@ -16,20 +16,14 @@ crate const CHECK_INVALID_HTML_TAGS: Pass = Pass {
};

struct InvalidHtmlTagsLinter<'a, 'tcx> {
-cx: &'a DocContext<'tcx>,
+cx: &'a mut DocContext<'tcx>,
}

-impl<'a, 'tcx> InvalidHtmlTagsLinter<'a, 'tcx> {
-fn new(cx: &'a DocContext<'tcx>) -> Self {
-InvalidHtmlTagsLinter { cx }
-}
-}

-crate fn check_invalid_html_tags(krate: Crate, cx: &DocContext<'_>) -> Crate {
+crate fn check_invalid_html_tags(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
if !cx.tcx.sess.is_nightly_build() {
krate
} else {
-let mut coll = InvalidHtmlTagsLinter::new(cx);
+let mut coll = InvalidHtmlTagsLinter { cx };

coll.fold_crate(krate)
}
2 changes: 1 addition & 1 deletion src/librustdoc/passes/mod.rs
@@ -53,7 +53,7 @@ crate use self::html_tags::CHECK_INVALID_HTML_TAGS;
#[derive(Copy, Clone)]
crate struct Pass {
crate name: &'static str,
-crate run: fn(clean::Crate, &DocContext<'_>) -> clean::Crate,
+crate run: fn(clean::Crate, &mut DocContext<'_>) -> clean::Crate,
crate description: &'static str,
}

10 changes: 3 additions & 7 deletions src/librustdoc/passes/non_autolinks.rs
@@ -23,15 +23,11 @@ const URL_REGEX: &str = concat!(
);

struct NonAutolinksLinter<'a, 'tcx> {
-cx: &'a DocContext<'tcx>,
+cx: &'a mut DocContext<'tcx>,
regex: Regex,
}

impl<'a, 'tcx> NonAutolinksLinter<'a, 'tcx> {
-fn new(cx: &'a DocContext<'tcx>) -> Self {
-Self { cx, regex: Regex::new(URL_REGEX).expect("failed to build regex") }
-}

fn find_raw_urls(
&self,
text: &str,
@@ -52,11 +48,11 @@ impl<'a, 'tcx> NonAutolinksLinter<'a, 'tcx> {
}
}

-crate fn check_non_autolinks(krate: Crate, cx: &DocContext<'_>) -> Crate {
+crate fn check_non_autolinks(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
if !cx.tcx.sess.is_nightly_build() {
krate
} else {
-let mut coll = NonAutolinksLinter::new(cx);
+let mut coll = NonAutolinksLinter { cx, regex: Regex::new(URL_REGEX).expect("failed to build regex") };

coll.fold_crate(krate)
}
2 changes: 1 addition & 1 deletion src/librustdoc/passes/propagate_doc_cfg.rs
@@ -12,7 +12,7 @@ crate const PROPAGATE_DOC_CFG: Pass = Pass {
description: "propagates `#[doc(cfg(...))]` to child items",
};

-crate fn propagate_doc_cfg(cr: Crate, _: &DocContext<'_>) -> Crate {
+crate fn propagate_doc_cfg(cr: Crate, _: &mut DocContext<'_>) -> Crate {
CfgPropagator { parent_cfg: None }.fold_crate(cr)
}

2 changes: 1 addition & 1 deletion src/librustdoc/passes/strip_hidden.rs
@@ -15,7 +15,7 @@ crate const STRIP_HIDDEN: Pass = Pass {
};

/// Strip items marked `#[doc(hidden)]`
-crate fn strip_hidden(krate: clean::Crate, _: &DocContext<'_>) -> clean::Crate {
+crate fn strip_hidden(krate: clean::Crate, _: &mut DocContext<'_>) -> clean::Crate {
let mut retained = DefIdSet::default();

// strip all #[doc(hidden)] items
2 changes: 1 addition & 1 deletion src/librustdoc/passes/strip_priv_imports.rs
@@ -9,6 +9,6 @@ crate const STRIP_PRIV_IMPORTS: Pass = Pass {
description: "strips all private import statements (`use`, `extern crate`) from a crate",
};

-crate fn strip_priv_imports(krate: clean::Crate, _: &DocContext<'_>) -> clean::Crate {
+crate fn strip_priv_imports(krate: clean::Crate, _: &mut DocContext<'_>) -> clean::Crate {
ImportStripper.fold_crate(krate)
}
2 changes: 1 addition & 1 deletion src/librustdoc/passes/strip_private.rs
@@ -14,7 +14,7 @@ crate const STRIP_PRIVATE: Pass = Pass {

/// Strip private items from the point of view of a crate or externally from a
/// crate, specified by the `xcrate` flag.
-crate fn strip_private(mut krate: clean::Crate, cx: &DocContext<'_>) -> clean::Crate {
+crate fn strip_private(mut krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate {
// This stripper collects all *retained* nodes.
let mut retained = DefIdSet::default();
let access_levels = cx.renderinfo.borrow().access_levels.clone();