From 5b8e386baececd1501443cb043ad43e032c0cbdb Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 3 Dec 2023 20:20:38 +0100 Subject: [PATCH 1/5] Improve macro descension API --- crates/hir-expand/src/lib.rs | 2 - crates/hir/src/lib.rs | 4 +- crates/hir/src/semantics.rs | 176 ++++++++++-------- .../extract_expressions_from_format_string.rs | 9 +- .../src/handlers/extract_function.rs | 8 +- crates/ide-db/src/helpers.rs | 4 +- crates/ide-db/src/search.rs | 8 +- crates/ide/src/call_hierarchy.rs | 4 +- crates/ide/src/doc_links.rs | 8 +- crates/ide/src/expand_macro.rs | 8 +- crates/ide/src/extend_selection.rs | 19 +- crates/ide/src/goto_declaration.rs | 4 +- crates/ide/src/goto_definition.rs | 4 +- crates/ide/src/goto_implementation.rs | 4 +- crates/ide/src/goto_type_definition.rs | 3 +- crates/ide/src/highlight_related.rs | 4 +- crates/ide/src/hover.rs | 12 +- crates/ide/src/moniker.rs | 4 +- crates/ide/src/references.rs | 4 +- crates/ide/src/signature_help.rs | 7 +- crates/ide/src/syntax_highlighting.rs | 18 +- 21 files changed, 177 insertions(+), 137 deletions(-) diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 602babcc9928..71c98b2770aa 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -629,8 +629,6 @@ impl ExpansionInfo { pub fn map_range_down<'a>( &'a self, span: SpanData, - // FIXME: use this for range mapping, so that we can resolve inline format args - _relative_token_offset: Option, ) -> Option> + 'a> { let tokens = self .exp_map diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 22e14b618126..53e60c5862ad 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -92,7 +92,9 @@ pub use crate::{ attrs::{resolve_doc_path_on, HasAttrs}, diagnostics::*, has_source::HasSource, - semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, + semantics::{ + DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits, + }, }; // Be careful with these re-exports. diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index ed3d3f1a3b69..7d3c89ddb61f 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -2,7 +2,11 @@ mod source_to_def; -use std::{cell::RefCell, fmt, iter, mem, ops}; +use std::{ + cell::RefCell, + fmt, iter, mem, + ops::{self, ControlFlow}, +}; use base_db::{FileId, FileRange}; use either::Either; @@ -39,6 +43,12 @@ use crate::{ TypeAlias, TypeParam, VariantDef, }; +pub enum DescendPreference { + SameText, + SameKind, + None, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item @@ -397,6 +407,7 @@ impl<'db> SemanticsImpl<'db> { // This might not be the correct way to do this, but it works for now let mut res = smallvec![]; let tokens = (|| { + // FIXME: the trivia skipping should not be necessary let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?; let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?; Some((first, last)) @@ -407,18 +418,19 @@ impl<'db> SemanticsImpl<'db> { }; if first == last { + // node is just the token, so descend the token self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. 
}| { if let Some(node) = value.parent_ancestors().find_map(N::cast) { res.push(node) } - false + ControlFlow::Continue(()) }); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; self.descend_into_macros_impl(first, 0.into(), &mut |token| { scratch.push(token); - false + ControlFlow::Continue(()) }); let mut scratch = scratch.into_iter(); @@ -441,7 +453,7 @@ impl<'db> SemanticsImpl<'db> { } } } - false + ControlFlow::Continue(()) }, ); } @@ -453,32 +465,43 @@ impl<'db> SemanticsImpl<'db> { /// be considered for the mapping in case of inline format args. pub fn descend_into_macros( &self, + mode: DescendPreference, token: SyntaxToken, offset: TextSize, ) -> SmallVec<[SyntaxToken; 1]> { - let mut res = smallvec![]; - self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| { - res.push(value); - false - }); - res - } - - /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token. - /// - /// Returns the original non descended token if none of the mapped counterparts have the same text. - pub fn descend_into_macros_with_same_text( - &self, - token: SyntaxToken, - offset: TextSize, - ) -> SmallVec<[SyntaxToken; 1]> { - let text = token.text(); + enum Dp<'t> { + SameText(&'t str), + SameKind(SyntaxKind), + None, + } + let fetch_kind = |token: &SyntaxToken| match token.parent() { + Some(node) => match node.kind() { + kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, + _ => token.kind(), + }, + None => token.kind(), + }; + let mode = match mode { + DescendPreference::SameText => Dp::SameText(token.text()), + DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), + DescendPreference::None => Dp::None, + }; let mut res = smallvec![]; self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { - if value.text() == text { + let is_a_match = match mode { + Dp::SameText(text) => value.text() == text, + Dp::SameKind(preferred_kind) => { + let kind = fetch_kind(&value); + kind == preferred_kind + // special case for derive macros + || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) + } + Dp::None => true, + }; + if is_a_match { res.push(value); } - false + ControlFlow::Continue(()) }); if res.is_empty() { res.push(token); @@ -486,45 +509,48 @@ impl<'db> SemanticsImpl<'db> { res } - pub fn descend_into_macros_with_kind_preference( + pub fn descend_into_macros_single( &self, + mode: DescendPreference, token: SyntaxToken, offset: TextSize, ) -> SyntaxToken { + enum Dp<'t> { + SameText(&'t str), + SameKind(SyntaxKind), + None, + } let fetch_kind = |token: &SyntaxToken| match token.parent() { Some(node) => match node.kind() { - kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => { - node.parent().map_or(kind, |it| it.kind()) - } + kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, _ => token.kind(), }, None => token.kind(), }; - let preferred_kind = fetch_kind(&token); - let mut res = None; + let mode = match mode { + DescendPreference::SameText => Dp::SameText(token.text()), + DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), + DescendPreference::None => Dp::None, + }; + let mut res = token.clone(); self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. 
}| { - if fetch_kind(&value) == preferred_kind { - res = Some(value); - true - } else { - if let None = res { - res = Some(value) + let is_a_match = match mode { + Dp::SameText(text) => value.text() == text, + Dp::SameKind(preferred_kind) => { + let kind = fetch_kind(&value); + kind == preferred_kind + // special case for derive macros + || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) } - false + Dp::None => true, + }; + if is_a_match { + res = value; + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) } }); - res.unwrap_or(token) - } - - /// Descend the token into its macro call if it is part of one, returning the token in the - /// expansion that it is associated with. If `offset` points into the token's range, it will - /// be considered for the mapping in case of inline format args. - pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken { - let mut res = token.clone(); - self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| { - res = value; - true - }); res } @@ -535,7 +561,7 @@ impl<'db> SemanticsImpl<'db> { // FIXME: We might want this to be Option to be able to opt out of subrange // mapping, specifically for node downmapping _offset: TextSize, - f: &mut dyn FnMut(InFile) -> bool, + f: &mut dyn FnMut(InFile) -> ControlFlow<()>, ) { // FIXME: Clean this up let _p = profile::span("descend_into_macros"); @@ -560,25 +586,24 @@ impl<'db> SemanticsImpl<'db> { let def_map = sa.resolver.def_map(); let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)]; - let mut process_expansion_for_token = - |stack: &mut SmallVec<_>, macro_file, _token: InFile<&_>| { - let expansion_info = cache - .entry(macro_file) - .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); + let mut process_expansion_for_token = |stack: &mut SmallVec<_>, macro_file| { + let expansion_info = cache + .entry(macro_file) + .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); - { - let InFile { file_id, value } = expansion_info.expanded(); - self.cache(value, file_id); - } + { + let InFile { file_id, value } = expansion_info.expanded(); + self.cache(value, file_id); + } - let mapped_tokens = expansion_info.map_range_down(span, None)?; - let len = stack.len(); + let mapped_tokens = expansion_info.map_range_down(span)?; + let len = stack.len(); - // requeue the tokens we got from mapping our current token down - stack.extend(mapped_tokens.map(Into::into)); - // if the length changed we have found a mapping for the token - (stack.len() != len).then_some(()) - }; + // requeue the tokens we got from mapping our current token down + stack.extend(mapped_tokens.map(Into::into)); + // if the length changed we have found a mapping for the token + (stack.len() != len).then_some(()) + }; // Remap the next token in the queue into a macro call its in, if it is not being remapped // either due to not being in a macro-call or because its unused push it into the result vec, @@ -598,7 +623,7 @@ impl<'db> SemanticsImpl<'db> { }); if let Some(call_id) = containing_attribute_macro_call { let file_id = call_id.as_macro_file(); - return process_expansion_for_token(&mut stack, file_id, token.as_ref()); + return process_expansion_for_token(&mut stack, file_id); } // Then check for token trees, that means we are either in a function-like macro or @@ -624,7 +649,7 @@ impl<'db> SemanticsImpl<'db> { it } }; - process_expansion_for_token(&mut stack, file_id, token.as_ref()) + process_expansion_for_token(&mut stack, 
file_id) } else if let Some(meta) = ast::Meta::cast(parent) { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute @@ -646,11 +671,7 @@ impl<'db> SemanticsImpl<'db> { Some(call_id) => { // resolved to a derive let file_id = call_id.as_macro_file(); - return process_expansion_for_token( - &mut stack, - file_id, - token.as_ref(), - ); + return process_expansion_for_token(&mut stack, file_id); } None => Some(adt), } @@ -682,11 +703,8 @@ impl<'db> SemanticsImpl<'db> { def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; let mut res = None; for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { - res = res.or(process_expansion_for_token( - &mut stack, - derive.as_macro_file(), - token.as_ref(), - )); + res = + res.or(process_expansion_for_token(&mut stack, derive.as_macro_file())); } res } else { @@ -695,7 +713,7 @@ impl<'db> SemanticsImpl<'db> { })() .is_none(); - if was_not_remapped && f(token) { + if was_not_remapped && f(token).is_break() { break; } } @@ -711,7 +729,7 @@ impl<'db> SemanticsImpl<'db> { offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) - .map(move |token| self.descend_into_macros(token, offset)) + .map(move |token| self.descend_into_macros(DescendPreference::None, token, offset)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 31a1ff496e13..55f2fd9f6ce8 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -1,4 +1,5 @@ use crate::{AssistContext, Assists}; +use hir::DescendPreference; use ide_db::{ assists::{AssistId, AssistKind}, syntax_helpers::{ @@ -34,9 +35,11 @@ pub(crate) fn extract_expressions_from_format_string( let fmt_string = ctx.find_token_at_offset::()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; - let expanded_t = ast::String::cast( - ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()), - )?; + let expanded_t = ast::String::cast(ctx.sema.descend_into_macros_single( + DescendPreference::SameKind, + fmt_string.syntax().clone(), + 0.into(), + ))?; if !is_format_string(&expanded_t) { return None; } diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 6b48d1588152..9b892ac1e981 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -3,8 +3,8 @@ use std::iter; use ast::make; use either::Either; use hir::{ - HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics, - TypeInfo, TypeParam, + DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, + PathResolution, Semantics, TypeInfo, TypeParam, }; use ide_db::{ defs::{Definition, NameRefClass}, @@ -751,7 +751,9 @@ impl FunctionBody { .descendants_with_tokens() .filter_map(SyntaxElement::into_token) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) - .flat_map(|t| sema.descend_into_macros(t, 0.into())) + .flat_map(|t| { + sema.descend_into_macros(DescendPreference::None, t, 0.into()) + }) .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); } } diff --git 
a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs index 330af442f754..d4b031879d11 100644 --- a/crates/ide-db/src/helpers.rs +++ b/crates/ide-db/src/helpers.rs @@ -3,7 +3,7 @@ use std::collections::VecDeque; use base_db::{FileId, SourceDatabaseExt}; -use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics}; +use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; use syntax::{ ast::{self, make}, AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, @@ -117,7 +117,7 @@ pub fn get_definition( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, ) -> Option { - for token in sema.descend_into_macros(token, 0.into()) { + for token in sema.descend_into_macros(DescendPreference::None, token, 0.into()) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 2ce036c044ad..c1ed17503fe9 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -8,8 +8,8 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ - AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource, - Semantics, Visibility, + AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile, + InRealFile, ModuleSource, Semantics, Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; @@ -467,7 +467,9 @@ impl<'a> FindUsages<'a> { // every textual hit. That function is notoriously // expensive even for things that do not get down mapped // into macros. - sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent()) + sema.descend_into_macros(DescendPreference::None, token, offset) + .into_iter() + .filter_map(|it| it.parent()) }) }; diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index 6f41f51f80ec..70391cd847b9 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -1,6 +1,6 @@ //! Entry point for call-hierarchy -use hir::Semantics; +use hir::{DescendPreference, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -87,7 +87,7 @@ pub(crate) fn outgoing_calls( })?; let mut calls = CallLocations::default(); - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None, token, offset) .into_iter() .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast)) .filter_map(|item| match item { diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index ac15b6aba618..97fa7dee306a 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -12,7 +12,9 @@ use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions use stdx::format_to; use url::Url; -use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs}; +use hir::{ + db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, HasAttrs, +}; use ide_db::{ base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase}, defs::{Definition, NameClass, NameRefClass}, @@ -144,7 +146,7 @@ pub(crate) fn external_docs( kind if kind.is_trivia() => 0, _ => 1, })?; - let token = sema.descend_into_macros_single(token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); let node = token.parent()?; let definition = match_ast! 
{ @@ -286,7 +288,7 @@ impl DocCommentToken { let original_start = doc_token.text_range().start(); let relative_comment_offset = offset - original_start - prefix_len; - sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| { + sema.descend_into_macros(DescendPreference::None,doc_token, offset).into_iter().find_map(|t| { let (node, descended_prefix_len) = match_ast! { match t { ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index a70f335ada3c..cc878dc7196b 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::{HirFileIdExt, InFile, Semantics}; +use hir::{DescendPreference, HirFileIdExt, InFile, Semantics}; use ide_db::{ base_db::FileId, helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, @@ -40,8 +40,10 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // struct Bar; // ``` - let derive = - sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| { + let derive = sema + .descend_into_macros(DescendPreference::None, tok.clone(), 0.into()) + .into_iter() + .find_map(|descended| { let hir_file = sema.hir_file_for(&descended.parent()?); if !hir_file.is_derive_attr_pseudo_expansion(db) { return None; diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 9b2ff070c74b..1cdbf7840e75 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -1,6 +1,6 @@ use std::iter::successors; -use hir::Semantics; +use hir::{DescendPreference, Semantics}; use ide_db::RootDatabase; use syntax::{ algo::{self, skip_trivia_token}, @@ -140,10 +140,16 @@ fn extend_tokens_from_range( // compute original mapped token range let extended = { - let fst_expanded = - sema.descend_into_macros_single(first_token.clone(), original_range.start()); - let lst_expanded = - sema.descend_into_macros_single(last_token.clone(), original_range.end()); + let fst_expanded = sema.descend_into_macros_single( + DescendPreference::None, + first_token.clone(), + original_range.start(), + ); + let lst_expanded = sema.descend_into_macros_single( + DescendPreference::None, + last_token.clone(), + original_range.end(), + ); let mut lca = algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?; lca = shallowest_node(&lca); @@ -157,7 +163,8 @@ fn extend_tokens_from_range( let validate = |offset: TextSize| { let extended = &extended; move |token: &SyntaxToken| -> bool { - let expanded = sema.descend_into_macros_single(token.clone(), offset); + let expanded = + sema.descend_into_macros_single(DescendPreference::None, token.clone(), offset); let parent = match expanded.parent() { Some(it) => it, None => return false, diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index 7e0fab42608b..ee94dff5fb30 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, Semantics}; +use hir::{AsAssocItem, DescendPreference, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, RootDatabase, @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec = sema - .descend_into_macros(original_token, offset) + 
.descend_into_macros(DescendPreference::None, original_token, offset) .iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 816f1bebee42..635f82686206 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -4,7 +4,7 @@ use crate::{ doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget, RangeInfo, TryToNav, }; -use hir::{AsAssocItem, AssocItem, Semantics}; +use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics}; use ide_db::{ base_db::{AnchoredPath, FileId, FileLoader}, defs::{Definition, IdentClass}, @@ -56,7 +56,7 @@ pub(crate) fn goto_definition( }); } let navs = sema - .descend_into_macros(original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone(), offset) .into_iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 3593c5c7dd9c..6c1b9966a88d 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, Impl, Semantics}; +use hir::{AsAssocItem, DescendPreference, Impl, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -34,7 +34,7 @@ pub(crate) fn goto_implementation( })?; let range = original_token.text_range(); let navs = - sema.descend_into_macros(original_token, offset) + sema.descend_into_macros(DescendPreference::None, original_token, offset) .into_iter() .filter_map(|token| token.parent().and_then(ast::NameLike::cast)) .filter_map(|node| match &node { diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index 955923d76910..4bd8bfdacb86 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -1,3 +1,4 @@ +use hir::DescendPreference; use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; @@ -37,7 +38,7 @@ pub(crate) fn goto_type_definition( } }; let range = token.text_range(); - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None,token, offset) .into_iter() .filter_map(|token| { let ty = sema diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index a7f5ae92a4ca..620e59a71bf5 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1,4 +1,4 @@ -use hir::Semantics; +use hir::{DescendPreference, Semantics}; use ide_db::{ base_db::{FileId, FilePosition, FileRange}, defs::{Definition, IdentClass}, @@ -461,7 +461,7 @@ fn find_defs( token: SyntaxToken, offset: TextSize, ) -> FxHashSet { - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None, token, offset) .into_iter() .filter_map(|token| IdentClass::classify_token(sema, &token)) .map(IdentClass::definitions_no_ops) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index e0b64fe7988e..7f2783df3d05 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -6,7 +6,7 @@ mod tests; use std::iter; use either::Either; -use hir::{db::DefDatabase, HasSource, LangItem, Semantics}; +use hir::{db::DefDatabase, DescendPreference, HasSource, LangItem, Semantics}; use ide_db::{ base_db::FileRange, defs::{Definition, IdentClass, NameRefClass, OperatorClass}, @@ 
-161,11 +161,11 @@ fn hover_simple( // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important - let descended = if in_attr { - [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into() - } else { - sema.descend_into_macros_with_same_text(original_token.clone(), offset) - }; + let descended = sema.descend_into_macros( + if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText }, + original_token.clone(), + offset, + ); let descended = || descended.iter(); let result = descended() diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 2ca2b5b1d5f3..28d455f4e591 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -1,7 +1,7 @@ //! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports) //! for LSIF and LSP. -use hir::{AsAssocItem, AssocItemContainer, Crate, Semantics}; +use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics}; use ide_db::{ base_db::{CrateOrigin, FilePosition, LangCrateOrigin}, defs::{Definition, IdentClass}, @@ -99,7 +99,7 @@ pub(crate) fn moniker( }); } let navs = sema - .descend_into_macros(original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone(), offset) .into_iter() .filter_map(|token| { IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index f387bbf6b014..2285ab199343 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -9,7 +9,7 @@ //! at the index that the match starts at and its tree parent is //! resolved to the search element definition, we get a reference. -use hir::{PathResolution, Semantics}; +use hir::{DescendPreference, PathResolution, Semantics}; use ide_db::{ base_db::FileId, defs::{Definition, NameClass, NameRefClass}, @@ -126,7 +126,7 @@ pub(crate) fn find_defs<'a>( ) }); token.map(|token| { - sema.descend_into_macros_with_same_text(token, offset) + sema.descend_into_macros(DescendPreference::SameText, token, offset) .into_iter() .filter_map(|it| ast::NameLike::cast(it.parent()?)) .filter_map(move |name_like| { diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index e020b52e1710..c95f9994615d 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -4,7 +4,10 @@ use std::collections::BTreeSet; use either::Either; -use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait}; +use hir::{ + AssocItem, DescendPreference, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, + Trait, +}; use ide_db::{ active_parameter::{callable_for_node, generic_def_for_node}, base_db::FilePosition, @@ -79,7 +82,7 @@ pub(crate) fn signature_help( // if the cursor is sandwiched between two space tokens and the call is unclosed // this prevents us from leaving the CallExpression .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; - let token = sema.descend_into_macros_single(token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); for node in token.parent_ancestors() { match_ast! 
{ diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index dd72484b3807..b2db6bb5c43f 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -13,7 +13,7 @@ mod html; #[cfg(test)] mod tests; -use hir::{Name, Semantics}; +use hir::{DescendPreference, Name, Semantics}; use ide_db::{FxHashMap, RootDatabase, SymbolKind}; use syntax::{ ast::{self, IsString}, @@ -393,14 +393,14 @@ fn traverse( // Attempt to descend tokens into macro-calls. let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { - let token = match attr_or_derive_item { - Some(AttrOrDerive::Attr(_)) => { - sema.descend_into_macros_with_kind_preference(token, 0.into()) - } - Some(AttrOrDerive::Derive(_)) | None => { - sema.descend_into_macros_single(token, 0.into()) - } - }; + let token = sema.descend_into_macros_single( + match attr_or_derive_item { + Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind, + Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, + }, + token, + 0.into(), + ); match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes Some(parent) => match (token.kind(), parent.syntax().kind()) { From d2cd30007cf404d185d451a32c471f6ea5321ca9 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 15:42:39 +0100 Subject: [PATCH 2/5] Implicit format args support --- crates/hir-def/src/body.rs | 12 ++ crates/hir-def/src/body/lower.rs | 24 ++-- crates/hir-def/src/body/tests.rs | 6 +- crates/hir-def/src/hir/format_args.rs | 14 ++- crates/hir/src/semantics.rs | 64 +++++++--- crates/hir/src/source_analyzer.rs | 66 +++++++--- .../ide-assists/src/handlers/bool_to_enum.rs | 2 + .../convert_tuple_return_type_to_struct.rs | 2 + .../extract_expressions_from_format_string.rs | 9 +- .../src/handlers/extract_function.rs | 4 +- .../ide-assists/src/handlers/inline_call.rs | 8 +- .../src/handlers/inline_local_variable.rs | 4 +- .../replace_named_generic_with_impl.rs | 5 +- .../src/handlers/unnecessary_async.rs | 6 +- crates/ide-db/src/helpers.rs | 2 +- crates/ide-db/src/rename.rs | 10 +- crates/ide-db/src/search.rs | 117 +++++++++++++++--- crates/ide/src/call_hierarchy.rs | 2 +- crates/ide/src/doc_links.rs | 4 +- crates/ide/src/expand_macro.rs | 2 +- crates/ide/src/extend_selection.rs | 23 ++-- crates/ide/src/goto_declaration.rs | 2 +- crates/ide/src/goto_definition.rs | 38 ++++-- crates/ide/src/goto_implementation.rs | 2 +- crates/ide/src/goto_type_definition.rs | 85 ++++++++++--- crates/ide/src/highlight_related.rs | 39 ++++-- crates/ide/src/hover.rs | 18 ++- crates/ide/src/hover/tests.rs | 60 +++++++++ crates/ide/src/moniker.rs | 2 +- crates/ide/src/references.rs | 50 ++++++-- crates/ide/src/rename.rs | 91 +++++++++++--- crates/ide/src/runnables.rs | 4 +- crates/ide/src/signature_help.rs | 2 +- crates/ide/src/syntax_highlighting.rs | 1 - crates/mbe/src/token_map.rs | 1 + crates/syntax/src/ast/token_ext.rs | 6 + crates/syntax/src/token_text.rs | 2 +- 37 files changed, 615 insertions(+), 174 deletions(-) diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 1942c60c075d..db28c6731ece 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -95,6 +95,8 @@ pub struct BodySourceMap { field_map_back: FxHashMap, pat_field_map_back: FxHashMap, + format_args_template_map: FxHashMap>, + expansions: FxHashMap>, HirFileId>, /// Diagnostics accumulated during body lowering. 
These contain `AstPtr`s and so are stored in @@ -387,6 +389,14 @@ impl BodySourceMap { self.expr_map.get(&src).copied() } + pub fn implicit_format_args( + &self, + node: InFile<&ast::FormatArgsExpr>, + ) -> Option<&[(syntax::TextRange, Name)]> { + let src = node.map(AstPtr::new).map(AstPtr::upcast::); + self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref) + } + /// Get a reference to the body source map's diagnostics. pub fn diagnostics(&self) -> &[BodyDiagnostic] { &self.diagnostics @@ -403,8 +413,10 @@ impl BodySourceMap { field_map_back, pat_field_map_back, expansions, + format_args_template_map, diagnostics, } = self; + format_args_template_map.shrink_to_fit(); expr_map.shrink_to_fit(); expr_map_back.shrink_to_fit(); pat_map.shrink_to_fit(); diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 0466068ec810..c22bd6e2e57c 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1597,12 +1597,20 @@ impl ExprCollector<'_> { }); let template = f.template(); let fmt_snippet = template.as_ref().map(ToString::to_string); + let mut mappings = vec![]; let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) { - Some((s, is_direct_literal)) => { - format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| { - self.alloc_expr_desugared(Expr::Path(Path::from(name))) - }) - } + Some((s, is_direct_literal)) => format_args::parse( + &s, + fmt_snippet, + args, + is_direct_literal, + |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), + |name, span| { + if let Some(span) = span { + mappings.push((span, name.clone())) + } + }, + ), None => FormatArgs { template: Default::default(), arguments: args.finish() }, }; @@ -1746,14 +1754,16 @@ impl ExprCollector<'_> { tail: Some(unsafe_arg_new), }); - self.alloc_expr( + let idx = self.alloc_expr( Expr::Call { callee: new_v1_formatted, args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]), is_assignee_expr: false, }, syntax_ptr, - ) + ); + self.source_map.format_args_template_map.insert(idx, mappings); + idx } /// Generate a hir expression for a format_args placeholder specification. 
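
Note on the lowering change above: `format_args!` lowering now records, through the new `record_usage` callback, the span of every implicitly captured name inside the template and stores the resulting `(TextRange, Name)` pairs in `BodySourceMap::format_args_template_map`; `BodySourceMap::implicit_format_args` exposes them, and `Semantics::check_for_format_args_template` later maps an offset inside the string literal to such a range and resolves the captured name. The sketch below is a standalone illustration of what gets recorded, not rust-analyzer's actual parser (that lives in `hir-def/src/hir/format_args.rs`, built on rustc's `parse_format`); the function name `implicit_captures` and the plain `Range<usize>` spans are made up for the example.

    // Sketch only: collect (span, name) pairs for implicit captures such as
    // `{x}` in a format template. Positional (`{}`, `{0}`) and escaped (`{{`)
    // forms are not implicit captures and are skipped.
    fn implicit_captures(template: &str) -> Vec<(std::ops::Range<usize>, String)> {
        let bytes = template.as_bytes();
        let mut out = Vec::new();
        let mut i = 0;
        while i < bytes.len() {
            match bytes[i] {
                // escaped `{{` is a literal brace, not a capture
                b'{' if bytes.get(i + 1) == Some(&b'{') => i += 2,
                b'{' => {
                    let start = i + 1;
                    let end = template[start..]
                        .find(|c: char| c == '}' || c == ':')
                        .map_or(template.len(), |off| start + off);
                    let name = &template[start..end];
                    let is_ident = !name.is_empty()
                        && !name.starts_with(|c: char| c.is_ascii_digit())
                        && name.chars().all(|c| c.is_alphanumeric() || c == '_');
                    if is_ident {
                        out.push((start..end, name.to_owned()));
                    }
                    i = end + 1;
                }
                _ => i += 1,
            }
        }
        out
    }

    fn main() {
        // `x` is an implicit capture; its span inside the template is recorded.
        assert_eq!(
            implicit_captures("hello {x}, {{not this}} or {0}"),
            vec![(7..8, "x".to_owned())]
        );
    }

The call-site side of both patches follows the same shape everywhere: the old `descend_into_macros_with_same_text`/`descend_into_macros_with_kind_preference` entry points are folded into `descend_into_macros`/`descend_into_macros_single` taking a `DescendPreference`, and PATCH 2/5 additionally drops the `TextSize` offset argument. A hedged example of the resulting caller pattern inside the rust-analyzer workspace (the helper name `descend_same_text` is invented; the `Semantics` calls are the ones visible in the hunks):

    use hir::{DescendPreference, Semantics};
    use ide_db::RootDatabase;
    use syntax::SyntaxToken;

    // Before: sema.descend_into_macros_with_same_text(token, offset)
    // After:  one entry point, behaviour selected by DescendPreference, no offset.
    fn descend_same_text(
        sema: &Semantics<'_, RootDatabase>,
        token: SyntaxToken,
    ) -> Vec<SyntaxToken> {
        sema.descend_into_macros(DescendPreference::SameText, token).into_vec()
    }
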
diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs index 5517abb1ab70..2b432dfbb92b 100644 --- a/crates/hir-def/src/body/tests.rs +++ b/crates/hir-def/src/body/tests.rs @@ -160,7 +160,7 @@ fn main() { let count = 10; builtin#lang(Arguments::new_v1_formatted)( &[ - "\"hello ", " ", " friends, we ", " ", "", "\"", + "hello ", " ", " friends, we ", " ", "", ], &[ builtin#lang(Argument::new_display)( @@ -261,7 +261,7 @@ impl SsrError { _ = $crate::error::SsrError::new( builtin#lang(Arguments::new_v1_formatted)( &[ - "\"Failed to resolve path `", "`\"", + "Failed to resolve path `", "`", ], &[ builtin#lang(Argument::new_display)( @@ -320,7 +320,7 @@ fn f() { $crate::panicking::panic_fmt( builtin#lang(Arguments::new_v1_formatted)( &[ - "\"cc\"", + "cc", ], &[], &[], diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs index 46d24bd4a614..068abb27a25c 100644 --- a/crates/hir-def/src/hir/format_args.rs +++ b/crates/hir-def/src/hir/format_args.rs @@ -5,7 +5,7 @@ use hir_expand::name::Name; use rustc_dependencies::parse_format as parse; use syntax::{ ast::{self, IsString}, - AstToken, SmolStr, TextRange, + SmolStr, TextRange, TextSize, }; use crate::hir::ExprId; @@ -170,15 +170,18 @@ pub(crate) fn parse( mut args: FormatArgumentsCollector, is_direct_literal: bool, mut synth: impl FnMut(Name) -> ExprId, + mut record_usage: impl FnMut(Name, Option), ) -> FormatArgs { - let text = s.text(); + let text = s.text_without_quotes(); let str_style = match s.quote_offsets() { Some(offsets) => { let raw = u32::from(offsets.quotes.0.len()) - 1; - (raw != 0).then_some(raw as usize) + // subtract 1 for the `r` prefix + (raw != 0).then(|| raw as usize - 1) } None => None, }; + let mut parser = parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format); @@ -199,6 +202,7 @@ pub(crate) fn parse( let to_span = |inner_span: parse::InnerSpan| { is_source_literal.then(|| { TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap()) + - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1) }) }; @@ -230,9 +234,10 @@ pub(crate) fn parse( Err(index) } } - ArgRef::Name(name, _span) => { + ArgRef::Name(name, span) => { let name = Name::new_text_dont_use(SmolStr::new(name)); if let Some((index, _)) = args.by_name(&name) { + record_usage(name, span); // Name found in `args`, so we resolve it to its index. if index < args.explicit_args().len() { // Mark it as used, if it was an explicit argument. 
@@ -246,6 +251,7 @@ pub(crate) fn parse( // disabled (see RFC #2795) // FIXME: Diagnose } + record_usage(name.clone(), span); Ok(args.add(FormatArgument { kind: FormatArgumentKind::Captured(name.clone()), // FIXME: This is problematic, we might want to synthesize a dummy diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 7d3c89ddb61f..b0e0f969d98f 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -29,8 +29,9 @@ use smallvec::{smallvec, SmallVec}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody}, - match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, + ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, + match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, + TextRange, TextSize, }; use crate::{ @@ -49,7 +50,7 @@ pub enum DescendPreference { None, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item Def(ModuleDef), @@ -402,6 +403,41 @@ impl<'db> SemanticsImpl<'db> { ) } + pub fn resolve_offset_in_format_args( + &self, + string: ast::String, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + debug_assert!(offset <= string.syntax().text_range().len()); + let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) + } + + pub fn check_for_format_args_template( + &self, + original_token: SyntaxToken, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + if let Some(original_string) = ast::String::cast(original_token.clone()) { + if let Some(quote) = original_string.open_quote_text_range() { + return self + .descend_into_macros(DescendPreference::SameText, original_token.clone()) + .into_iter() + .find_map(|token| { + self.resolve_offset_in_format_args( + ast::String::cast(token)?, + offset - quote.end(), + ) + }) + .map(|(range, res)| (range + quote.end(), res)); + } + } + None + } + /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now @@ -419,8 +455,12 @@ impl<'db> SemanticsImpl<'db> { if first == last { // node is just the token, so descend the token - self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| { - if let Some(node) = value.parent_ancestors().find_map(N::cast) { + self.descend_into_macros_impl(first, &mut |InFile { value, .. 
}| { + if let Some(node) = value + .parent_ancestors() + .take_while(|it| it.text_range() == value.text_range()) + .find_map(N::cast) + { res.push(node) } ControlFlow::Continue(()) @@ -428,7 +468,7 @@ impl<'db> SemanticsImpl<'db> { } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(first, 0.into(), &mut |token| { + self.descend_into_macros_impl(first, &mut |token| { scratch.push(token); ControlFlow::Continue(()) }); @@ -436,7 +476,6 @@ impl<'db> SemanticsImpl<'db> { let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( last, - 0.into(), &mut |InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { @@ -467,7 +506,6 @@ impl<'db> SemanticsImpl<'db> { &self, mode: DescendPreference, token: SyntaxToken, - offset: TextSize, ) -> SmallVec<[SyntaxToken; 1]> { enum Dp<'t> { SameText(&'t str), @@ -487,7 +525,7 @@ impl<'db> SemanticsImpl<'db> { DescendPreference::None => Dp::None, }; let mut res = smallvec![]; - self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { + self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { let is_a_match = match mode { Dp::SameText(text) => value.text() == text, Dp::SameKind(preferred_kind) => { @@ -513,7 +551,6 @@ impl<'db> SemanticsImpl<'db> { &self, mode: DescendPreference, token: SyntaxToken, - offset: TextSize, ) -> SyntaxToken { enum Dp<'t> { SameText(&'t str), @@ -533,7 +570,7 @@ impl<'db> SemanticsImpl<'db> { DescendPreference::None => Dp::None, }; let mut res = token.clone(); - self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { + self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. 
}| { let is_a_match = match mode { Dp::SameText(text) => value.text() == text, Dp::SameKind(preferred_kind) => { @@ -558,9 +595,6 @@ impl<'db> SemanticsImpl<'db> { fn descend_into_macros_impl( &self, token: SyntaxToken, - // FIXME: We might want this to be Option to be able to opt out of subrange - // mapping, specifically for node downmapping - _offset: TextSize, f: &mut dyn FnMut(InFile) -> ControlFlow<()>, ) { // FIXME: Clean this up @@ -729,7 +763,7 @@ impl<'db> SemanticsImpl<'db> { offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) - .map(move |token| self.descend_into_macros(DescendPreference::None, token, offset)) + .map(move |token| self.descend_into_macros(DescendPreference::None, token)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 8b1148368961..9fdee209cfbc 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -820,6 +820,29 @@ impl SourceAnalyzer { false } + pub(crate) fn resolve_offset_in_format_args( + &self, + db: &dyn HirDatabase, + format_args: InFile<&ast::FormatArgsExpr>, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + let implicits = self.body_source_map()?.implicit_format_args(format_args)?; + implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| { + ( + *range, + resolve_hir_value_path( + db, + &self.resolver, + self.resolver.body_owner(), + &Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + Some(name.clone()), + )), + ), + ) + }) + } + fn resolve_impl_method_or_trait_def( &self, db: &dyn HirDatabase, @@ -1038,24 +1061,7 @@ fn resolve_hir_path_( }; let body_owner = resolver.body_owner(); - let values = || { - resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { - let res = match val { - ValueNs::LocalBinding(binding_id) => { - let var = Local { parent: body_owner?, binding_id }; - PathResolution::Local(var) - } - ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), - ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), - ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), - ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), - ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()), - ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()), - ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()), - }; - Some(res) - }) - }; + let values = || resolve_hir_value_path(db, resolver, body_owner, path); let items = || { resolver @@ -1075,6 +1081,30 @@ fn resolve_hir_path_( .or_else(macros) } +fn resolve_hir_value_path( + db: &dyn HirDatabase, + resolver: &Resolver, + body_owner: Option, + path: &Path, +) -> Option { + resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { + let res = match val { + ValueNs::LocalBinding(binding_id) => { + let var = Local { parent: body_owner?, binding_id }; + PathResolution::Local(var) + } + ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), + ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), + ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), + ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), + ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()), + ValueNs::ImplSelf(impl_id) => 
PathResolution::SelfType(impl_id.into()), + ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()), + }; + Some(res) + }) +} + /// Resolves a path where we know it is a qualifier of another path. /// /// For example, if we have: diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index 11facc5bee2a..0f2d1057c0a4 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -328,6 +328,7 @@ fn augment_references_with_imports( references .iter() .filter_map(|FileReference { range, name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module())) }) .map(|(range, name, ref_module)| { @@ -455,6 +456,7 @@ fn add_enum_def( .iter() .flat_map(|(_, refs)| refs) .filter_map(|FileReference { name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 1f3caa7db33f..79b46d66121e 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -186,6 +186,7 @@ fn augment_references_with_imports( references .iter() .filter_map(|FileReference { name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module())) }) .map(|(name, ref_module)| { @@ -238,6 +239,7 @@ fn add_tuple_struct_def( .iter() .flat_map(|(_, refs)| refs) .filter_map(|FileReference { name, .. 
}| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 55f2fd9f6ce8..9d72d3af096a 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -35,11 +35,10 @@ pub(crate) fn extract_expressions_from_format_string( let fmt_string = ctx.find_token_at_offset::()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; - let expanded_t = ast::String::cast(ctx.sema.descend_into_macros_single( - DescendPreference::SameKind, - fmt_string.syntax().clone(), - 0.into(), - ))?; + let expanded_t = ast::String::cast( + ctx.sema + .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()), + )?; if !is_format_string(&expanded_t) { return None; } diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 9b892ac1e981..d4a12307790b 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -751,9 +751,7 @@ impl FunctionBody { .descendants_with_tokens() .filter_map(SyntaxElement::into_token) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) - .flat_map(|t| { - sema.descend_into_macros(DescendPreference::None, t, 0.into()) - }) + .flat_map(|t| sema.descend_into_macros(DescendPreference::None, t)) .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); } } diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index f8c75bdb0de1..5b9cc5f66cde 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -8,7 +8,7 @@ use ide_db::{ defs::Definition, imports::insert_use::remove_path_if_in_use_stmt, path_transform::PathTransform, - search::{FileReference, SearchScope}, + search::{FileReference, FileReferenceNode, SearchScope}, source_change::SourceChangeBuilder, syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref}, RootDatabase, @@ -148,7 +148,7 @@ pub(super) fn split_refs_and_uses( ) -> (Vec, Vec) { iter.into_iter() .filter_map(|file_ref| match file_ref.name { - ast::NameLike::NameRef(name_ref) => Some(name_ref), + FileReferenceNode::NameRef(name_ref) => Some(name_ref), _ => None, }) .filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) { @@ -346,7 +346,7 @@ fn inline( match param.as_local(sema.db) { Some(l) => usages_for_locals(l) .map(|FileReference { name, range, .. }| match name { - ast::NameLike::NameRef(_) => body + FileReferenceNode::NameRef(_) => body .syntax() .covering_element(range) .ancestors() @@ -372,7 +372,7 @@ fn inline( if let Some(self_local) = params[0].2.as_local(sema.db) { usages_for_locals(self_local) .filter_map(|FileReference { name, range, .. 
}| match name { - ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)), + FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)), _ => None, }) .for_each(|usage| { diff --git a/crates/ide-assists/src/handlers/inline_local_variable.rs b/crates/ide-assists/src/handlers/inline_local_variable.rs index 49dcde75d2b3..5d8ba43ec846 100644 --- a/crates/ide-assists/src/handlers/inline_local_variable.rs +++ b/crates/ide-assists/src/handlers/inline_local_variable.rs @@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics}; use ide_db::{ base_db::FileId, defs::Definition, - search::{FileReference, UsageSearchResult}, + search::{FileReference, FileReferenceNode, UsageSearchResult}, RootDatabase, }; use syntax::{ @@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) let wrap_in_parens = references .into_iter() .filter_map(|FileReference { range, name, .. }| match name { - ast::NameLike::NameRef(name) => Some((range, name)), + FileReferenceNode::NameRef(name) => Some((range, name)), _ => None, }) .map(|(range, name_ref)| { diff --git a/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs index c7c0be4c7d4f..e61ce481727a 100644 --- a/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs +++ b/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs @@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl( let mut path_types_to_replace = Vec::new(); for (_a, refs) in usage_refs.iter() { for usage_ref in refs { - let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?; + let Some(name_like) = usage_ref.name.clone().into_name_like() else { + continue; + }; + let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?; path_types_to_replace.push(param_node); } } diff --git a/crates/ide-assists/src/handlers/unnecessary_async.rs b/crates/ide-assists/src/handlers/unnecessary_async.rs index 7f612c2a142c..1cfa291a29d8 100644 --- a/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -2,11 +2,11 @@ use ide_db::{ assists::{AssistId, AssistKind}, base_db::FileId, defs::Definition, - search::FileReference, + search::{FileReference, FileReferenceNode}, syntax_helpers::node_ext::full_path_of_name_ref, }; use syntax::{ - ast::{self, NameLike, NameRef}, + ast::{self, NameRef}, AstNode, SyntaxKind, TextRange, }; @@ -76,7 +76,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O for await_expr in find_all_references(ctx, &Definition::Function(fn_def)) // Keep only references that correspond NameRefs. 
.filter_map(|(_, reference)| match reference.name { - NameLike::NameRef(nameref) => Some(nameref), + FileReferenceNode::NameRef(nameref) => Some(nameref), _ => None, }) // Keep only references that correspond to await expressions diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs index d4b031879d11..9363bdfa14b2 100644 --- a/crates/ide-db/src/helpers.rs +++ b/crates/ide-db/src/helpers.rs @@ -117,7 +117,7 @@ pub fn get_definition( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, ) -> Option { - for token in sema.descend_into_macros(DescendPreference::None, token, 0.into()) { + for token in sema.descend_into_macros(DescendPreference::None, token) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 676d286b8dcc..d2b6a732689c 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -34,7 +34,7 @@ use text_edit::{TextEdit, TextEditBuilder}; use crate::{ defs::Definition, - search::FileReference, + search::{FileReference, FileReferenceNode}, source_change::{FileSystemEdit, SourceChange}, syntax_helpers::node_ext::expr_as_name_ref, traits::convert_to_def_in_trait, @@ -361,7 +361,7 @@ pub fn source_edit_from_references( // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far let mut edited_ranges = Vec::new(); for &FileReference { range, ref name, .. } in references { - let name_range = name.syntax().text_range(); + let name_range = name.text_range(); if name_range.len() != range.len() { // This usage comes from a different token kind that was downmapped to a NameLike in a macro // Renaming this will most likely break things syntax-wise @@ -371,17 +371,17 @@ pub fn source_edit_from_references( // if the ranges differ then the node is inside a macro call, we can't really attempt // to make special rewrites like shorthand syntax and such, so just rename the node in // the macro input - ast::NameLike::NameRef(name_ref) if name_range == range => { + FileReferenceNode::NameRef(name_ref) if name_range == range => { source_edit_from_name_ref(&mut edit, name_ref, new_name, def) } - ast::NameLike::Name(name) if name_range == range => { + FileReferenceNode::Name(name) if name_range == range => { source_edit_from_name(&mut edit, name, new_name) } _ => false, }; if !has_emitted_edit && !edited_ranges.contains(&range.start()) { let (range, new_name) = match name { - ast::NameLike::Lifetime(_) => ( + FileReferenceNode::Lifetime(_) => ( TextRange::new(range.start() + syntax::TextSize::from(1), range.end()), new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(), ), diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index c1ed17503fe9..dbef36026822 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -9,13 +9,13 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile, - InRealFile, ModuleSource, Semantics, Visibility, + InRealFile, ModuleSource, PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; use once_cell::unsync::Lazy; use parser::SyntaxKind; -use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; +use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize}; use 
triomphe::Arc; use crate::{ @@ -63,10 +63,67 @@ pub struct FileReference { /// The range of the reference in the original file pub range: TextRange, /// The node of the reference in the (macro-)file - pub name: ast::NameLike, + pub name: FileReferenceNode, pub category: Option, } +#[derive(Debug, Clone)] +pub enum FileReferenceNode { + Name(ast::Name), + NameRef(ast::NameRef), + Lifetime(ast::Lifetime), + FormatStringEntry(ast::String, TextRange), +} + +impl FileReferenceNode { + pub fn text_range(&self) -> TextRange { + match self { + FileReferenceNode::Name(it) => it.syntax().text_range(), + FileReferenceNode::NameRef(it) => it.syntax().text_range(), + FileReferenceNode::Lifetime(it) => it.syntax().text_range(), + FileReferenceNode::FormatStringEntry(_, range) => *range, + } + } + pub fn syntax(&self) -> SyntaxElement { + match self { + FileReferenceNode::Name(it) => it.syntax().clone().into(), + FileReferenceNode::NameRef(it) => it.syntax().clone().into(), + FileReferenceNode::Lifetime(it) => it.syntax().clone().into(), + FileReferenceNode::FormatStringEntry(it, _) => it.syntax().clone().into(), + } + } + pub fn into_name_like(self) -> Option { + match self { + FileReferenceNode::Name(it) => Some(ast::NameLike::Name(it)), + FileReferenceNode::NameRef(it) => Some(ast::NameLike::NameRef(it)), + FileReferenceNode::Lifetime(it) => Some(ast::NameLike::Lifetime(it)), + FileReferenceNode::FormatStringEntry(_, _) => None, + } + } + pub fn as_name_ref(&self) -> Option<&ast::NameRef> { + match self { + FileReferenceNode::NameRef(name_ref) => Some(name_ref), + _ => None, + } + } + pub fn as_lifetime(&self) -> Option<&ast::Lifetime> { + match self { + FileReferenceNode::Lifetime(lifetime) => Some(lifetime), + _ => None, + } + } + pub fn text(&self) -> syntax::TokenText<'_> { + match self { + FileReferenceNode::NameRef(name_ref) => name_ref.text(), + FileReferenceNode::Name(name) => name.text(), + FileReferenceNode::Lifetime(lifetime) => lifetime.text(), + FileReferenceNode::FormatStringEntry(it, range) => { + syntax::TokenText::borrowed(&it.text()[*range - it.syntax().text_range().start()]) + } + } + } +} + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum ReferenceCategory { // FIXME: Add this variant and delete the `retain_adt_literal_usages` function. @@ -467,7 +524,7 @@ impl<'a> FindUsages<'a> { // every textual hit. That function is notoriously // expensive even for things that do not get down mapped // into macros. 
- sema.descend_into_macros(DescendPreference::None, token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|it| it.parent()) }) @@ -479,6 +536,17 @@ impl<'a> FindUsages<'a> { // Search for occurrences of the items name for offset in match_indices(&text, finder, search_range) { + tree.token_at_offset(offset).into_iter().for_each(|token| { + let Some(str_token) = ast::String::cast(token.clone()) else { return }; + if let Some((range, nameres)) = + sema.check_for_format_args_template(token.clone(), offset) + { + if self.found_format_args_ref(file_id, range, str_token, nameres, sink) { + return; + } + } + }); + for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) { if match name { ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), @@ -593,7 +661,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: None, }; sink(file_id, reference) @@ -612,7 +680,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import), }; sink(file_id, reference) @@ -621,6 +689,27 @@ impl<'a> FindUsages<'a> { } } + fn found_format_args_ref( + &self, + file_id: FileId, + range: TextRange, + token: ast::String, + res: Option, + sink: &mut dyn FnMut(FileId, FileReference) -> bool, + ) -> bool { + match res.map(Definition::from) { + Some(def) if def == self.def => { + let reference = FileReference { + range, + name: FileReferenceNode::FormatStringEntry(token, range), + category: Some(ReferenceCategory::Read), + }; + sink(file_id, reference) + } + _ => false, + } + } + fn found_lifetime( &self, lifetime: &ast::Lifetime, @@ -631,7 +720,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax()); let reference = FileReference { range, - name: ast::NameLike::Lifetime(lifetime.clone()), + name: FileReferenceNode::Lifetime(lifetime.clone()), category: None, }; sink(file_id, reference) @@ -655,7 +744,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -671,7 +760,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -681,7 +770,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -705,7 +794,7 @@ impl<'a> FindUsages<'a> { }; let reference = FileReference { range, - name: 
ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: access, }; sink(file_id, reference) @@ -728,7 +817,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), // FIXME: mutable patterns should have `Write` access category: Some(ReferenceCategory::Read), }; @@ -738,7 +827,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), category: None, }; sink(file_id, reference) @@ -763,7 +852,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), category: None, }; sink(file_id, reference) diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index 70391cd847b9..5cc64e60ed85 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -87,7 +87,7 @@ pub(crate) fn outgoing_calls( })?; let mut calls = CallLocations::default(); - sema.descend_into_macros(DescendPreference::None, token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast)) .filter_map(|item| match item { diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index 97fa7dee306a..9760f9daf0a3 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -146,7 +146,7 @@ pub(crate) fn external_docs( kind if kind.is_trivia() => 0, _ => 1, })?; - let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token); let node = token.parent()?; let definition = match_ast! { @@ -288,7 +288,7 @@ impl DocCommentToken { let original_start = doc_token.text_range().start(); let relative_comment_offset = offset - original_start - prefix_len; - sema.descend_into_macros(DescendPreference::None,doc_token, offset).into_iter().find_map(|t| { + sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| { let (node, descended_prefix_len) = match_ast! 
{ match t { ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index cc878dc7196b..653d9f088384 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -41,7 +41,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // ``` let derive = sema - .descend_into_macros(DescendPreference::None, tok.clone(), 0.into()) + .descend_into_macros(DescendPreference::None, tok.clone()) .into_iter() .find_map(|descended| { let hir_file = sema.hir_file_for(&descended.parent()?); diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 1cdbf7840e75..b706e959d34e 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -140,16 +140,10 @@ fn extend_tokens_from_range( // compute original mapped token range let extended = { - let fst_expanded = sema.descend_into_macros_single( - DescendPreference::None, - first_token.clone(), - original_range.start(), - ); - let lst_expanded = sema.descend_into_macros_single( - DescendPreference::None, - last_token.clone(), - original_range.end(), - ); + let fst_expanded = + sema.descend_into_macros_single(DescendPreference::None, first_token.clone()); + let lst_expanded = + sema.descend_into_macros_single(DescendPreference::None, last_token.clone()); let mut lca = algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?; lca = shallowest_node(&lca); @@ -160,11 +154,10 @@ fn extend_tokens_from_range( }; // Compute parent node range - let validate = |offset: TextSize| { + let validate = || { let extended = &extended; move |token: &SyntaxToken| -> bool { - let expanded = - sema.descend_into_macros_single(DescendPreference::None, token.clone(), offset); + let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone()); let parent = match expanded.parent() { Some(it) => it, None => return false, @@ -178,14 +171,14 @@ fn extend_tokens_from_range( let token = token.prev_token()?; skip_trivia_token(token, Direction::Prev) }) - .take_while(validate(original_range.start())) + .take_while(validate()) .last()?; let last = successors(Some(last_token), |token| { let token = token.next_token()?; skip_trivia_token(token, Direction::Next) }) - .take_while(validate(original_range.end())) + .take_while(validate()) .last()?; let range = first.text_range().cover(last.text_range()); diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index ee94dff5fb30..ad7ec1964567 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec = sema - .descend_into_macros(DescendPreference::None, original_token, offset) + .descend_into_macros(DescendPreference::None, original_token) .iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 635f82686206..5ca82a362f53 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -55,8 +55,21 @@ pub(crate) fn goto_definition( Some(RangeInfo::new(link_range, vec![nav])) }); } + + if let Some((range, resolution)) = + sema.check_for_format_args_template(original_token.clone(), offset) + { + return Some(RangeInfo::new( + 
range, + match resolution { + Some(res) => def_to_nav(db, Definition::from(res)), + None => vec![], + }, + )); + } + let navs = sema - .descend_into_macros(DescendPreference::None, original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone()) .into_iter() .filter_map(|token| { let parent = token.parent()?; @@ -809,18 +822,13 @@ mod confuse_index { fn foo(); } fn goto_through_format() { check( r#" +//- minicore: fmt #[macro_export] macro_rules! format { ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*))) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args { - ($fmt:expr) => ({ /* compiler built-in */ }); - ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) -} pub mod __export { - pub use crate::format_args; + pub use core::format_args; fn foo() {} // for index confusion } fn foo() -> i8 {} @@ -2056,6 +2064,20 @@ fn f2() { struct S2; S1::e$0(); } +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "world"; + // ^ + format_args!("hello {a$0}"); +} "#, ); } diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 6c1b9966a88d..bb474282dd7c 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -34,7 +34,7 @@ pub(crate) fn goto_implementation( })?; let range = original_token.text_range(); let navs = - sema.descend_into_macros(DescendPreference::None, original_token, offset) + sema.descend_into_macros(DescendPreference::None, original_token) .into_iter() .filter_map(|token| token.parent().and_then(ast::NameLike::cast)) .filter_map(|node| match &node { diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index 4bd8bfdacb86..83f134aaaf57 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -1,4 +1,4 @@ -use hir::DescendPreference; +use hir::{DescendPreference, GenericParam}; use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; @@ -37,8 +37,37 @@ pub(crate) fn goto_type_definition( } } }; + let mut process_ty = |ty: hir::Type| { + // collect from each `ty` into the `res` result vec + let ty = ty.strip_references(); + ty.walk(db, |t| { + if let Some(adt) = t.as_adt() { + push(adt.into()); + } else if let Some(trait_) = t.as_dyn_trait() { + push(trait_.into()); + } else if let Some(traits) = t.as_impl_traits(db) { + traits.for_each(|it| push(it.into())); + } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { + push(trait_.into()); + } + }); + }; + if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) { + if let Some(ty) = resolution.and_then(|res| match Definition::from(res) { + Definition::Const(it) => Some(it.ty(db)), + Definition::Static(it) => Some(it.ty(db)), + Definition::GenericParam(GenericParam::ConstParam(it)) => Some(it.ty(db)), + Definition::Local(it) => Some(it.ty(db)), + Definition::Adt(hir::Adt::Struct(it)) => Some(it.ty(db)), + _ => None, + }) { + process_ty(ty); + } + return Some(RangeInfo::new(range, res)); + } + let range = token.text_range(); - sema.descend_into_macros(DescendPreference::None,token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|token| { let ty = sema @@ -76,21 +105,7 @@ pub(crate) fn goto_type_definition( }); ty }) - .for_each(|ty| { - // 
collect from each `ty` into the `res` result vec - let ty = ty.strip_references(); - ty.walk(db, |t| { - if let Some(adt) = t.as_adt() { - push(adt.into()); - } else if let Some(trait_) = t.as_dyn_trait() { - push(trait_.into()); - } else if let Some(traits) = t.as_impl_traits(db) { - traits.for_each(|it| push(it.into())); - } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { - push(trait_.into()); - } - }); - }); + .for_each(process_ty); Some(RangeInfo::new(range, res)) } @@ -326,6 +341,42 @@ struct Baz(T); //^^^ fn foo(x$0: Bar, Baz) {} +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ + fn test() { + let a = Bar; + format_args!("hello {a$0}"); +} +"#, + ); + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ + fn test() { + format_args!("hello {Bar$0}"); +} +"#, + ); + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ +const BAR: Bar = Bar; +fn test() { + format_args!("hello {BAR$0}"); +} "#, ); } diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 620e59a71bf5..8daff8c2ebe3 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1,3 +1,5 @@ +use std::iter; + use hir::{DescendPreference, Semantics}; use ide_db::{ base_db::{FileId, FilePosition, FileRange}, @@ -15,7 +17,6 @@ use syntax::{ SyntaxKind::{self, IDENT, INT_NUMBER}, SyntaxNode, SyntaxToken, TextRange, T, }; -use text_edit::TextSize; use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav}; @@ -132,7 +133,16 @@ fn highlight_references( token: SyntaxToken, FilePosition { file_id, offset }: FilePosition, ) -> Option> { - let defs = find_defs(sema, token.clone(), offset); + let defs = if let Some((range, resolution)) = + sema.check_for_format_args_template(token.clone(), offset) + { + match resolution.map(Definition::from) { + Some(def) => iter::once(def).collect(), + None => return Some(vec![HighlightedRange { range, category: None }]), + } + } else { + find_defs(sema, token.clone()) + }; let usages = defs .iter() .filter_map(|&d| { @@ -456,12 +466,8 @@ fn cover_range(r0: Option, r1: Option) -> Option, - token: SyntaxToken, - offset: TextSize, -) -> FxHashSet { - sema.descend_into_macros(DescendPreference::None, token, offset) +fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet { + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|token| IdentClass::classify_token(sema, &token)) .map(IdentClass::definitions_no_ops) @@ -1620,6 +1626,23 @@ fn f2(t: T) { T::C; T::f(); } +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "foo"; + // ^ + format_args!("hello {a} {a$0} {}", a); + // ^read + // ^read + // ^read +} "#, ); } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 7f2783df3d05..88a5b623425d 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -150,6 +150,19 @@ fn hover_simple( }); } + if let Some((range, resolution)) = + sema.check_for_format_args_template(original_token.clone(), offset) + { + let res = hover_for_definition( + sema, + file_id, + Definition::from(resolution?), + &original_token.parent()?, + config, + )?; + return Some(RangeInfo::new(range, res)); + } + let in_attr = original_token .parent_ancestors() .filter_map(ast::Item::cast) @@ -164,7 +177,6 @@ fn hover_simple( let descended = sema.descend_into_macros( if in_attr { DescendPreference::SameKind } else { 
DescendPreference::SameText }, original_token.clone(), - offset, ); let descended = || descended.iter(); @@ -298,11 +310,11 @@ pub(crate) fn hover_for_definition( sema: &Semantics<'_, RootDatabase>, file_id: FileId, definition: Definition, - node: &SyntaxNode, + scope_node: &SyntaxNode, config: &HoverConfig, ) -> Option { let famous_defs = match &definition { - Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())), + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), _ => None, }; render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| { diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index d3d492f3fdef..53585624b68a 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -6613,3 +6613,63 @@ fn test() { "#]], ); } + +#[test] +fn format_args_implicit() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!("{aaaaa$0}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn format_args_implicit2() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!("{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn format_args_implicit_raw() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!(r"{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 28d455f4e591..8e8bb5e0139e 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -99,7 +99,7 @@ pub(crate) fn moniker( }); } let navs = sema - .descend_into_macros(DescendPreference::None, original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone()) .into_iter() .filter_map(|token| { IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index 2285ab199343..b805ddfa2678 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -109,7 +109,7 @@ pub(crate) fn find_all_refs( } None => { let search = make_searcher(false); - Some(find_defs(sema, &syntax, position.offset)?.map(search).collect()) + Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect()) } } } @@ -118,15 +118,27 @@ pub(crate) fn find_defs<'a>( sema: &'a Semantics<'_, RootDatabase>, syntax: &SyntaxNode, offset: TextSize, -) -> Option + 'a> { +) -> Option + 'a> { let token = syntax.token_at_offset(offset).find(|t| { matches!( t.kind(), - IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] + IDENT + | INT_NUMBER + | LIFETIME_IDENT + | STRING + | T![self] + | T![super] + | T![crate] + | T![Self] ) - }); - token.map(|token| { - sema.descend_into_macros(DescendPreference::SameText, token, offset) + })?; + + if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) { + return resolution.map(Definition::from).map(|it| vec![it]); + } + + Some( + sema.descend_into_macros(DescendPreference::SameText, token) .into_iter() .filter_map(|it| ast::NameLike::cast(it.parent()?)) .filter_map(move |name_like| { @@ -162,7 +174,8 @@ pub(crate) fn find_defs<'a>( }; Some(def) }) - 
}) + .collect(), + ) } pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool { @@ -2092,4 +2105,27 @@ fn main() { r#fn(); } "#]], ); } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "foo"; + format_args!("hello {a} {a$0} {}", a); + // ^ + // ^ + // ^ +} +"#, + expect![[r#" + a Local FileId(0) 20..21 20..21 + + FileId(0) 56..57 Read + FileId(0) 60..61 Read + FileId(0) 68..69 Read + "#]], + ); + } } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index ad1eb2499718..1febfabfcb7f 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -6,14 +6,16 @@ use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ - base_db::FileId, + base_db::{FileId, FileRange}, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, RootDatabase, }; use itertools::Itertools; use stdx::{always, never}; -use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize}; +use syntax::{ + ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize, +}; use text_edit::TextEdit; @@ -34,23 +36,20 @@ pub(crate) fn prepare_rename( let syntax = source_file.syntax(); let res = find_definitions(&sema, syntax, position)? - .map(|(name_like, def)| { + .map(|(frange, kind, def)| { // ensure all ranges are valid if def.range_for_rename(&sema).is_none() { bail!("No references found at position") } - let Some(frange) = sema.original_range_opt(name_like.syntax()) else { - bail!("No references found at position"); - }; always!( frange.range.contains_inclusive(position.offset) && frange.file_id == position.file_id ); - Ok(match name_like { - ast::NameLike::Lifetime(_) => { + Ok(match kind { + SyntaxKind::LIFETIME => { TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end()) } _ => frange.range, @@ -93,7 +92,7 @@ pub(crate) fn rename( let defs = find_definitions(&sema, syntax, position)?; let ops: RenameResult> = defs - .map(|(_namelike, def)| { + .map(|(.., def)| { if let Definition::Local(local) = def { if let Some(self_param) = local.as_self_param(sema.db) { cov_mark::hit!(rename_self_to_param); @@ -134,11 +133,27 @@ pub(crate) fn will_rename_file( fn find_definitions( sema: &Semantics<'_, RootDatabase>, syntax: &SyntaxNode, - position: FilePosition, -) -> RenameResult> { - let symbols = sema - .find_nodes_at_offset_with_descend::(syntax, position.offset) - .map(|name_like| { + FilePosition { file_id, offset }: FilePosition, +) -> RenameResult> { + let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING)); + + if let Some((range, Some(resolution))) = + token.and_then(|token| sema.check_for_format_args_template(token, offset)) + { + return Ok(vec![( + FileRange { file_id, range }, + SyntaxKind::STRING, + Definition::from(resolution), + )] + .into_iter()); + } + + let symbols = + sema.find_nodes_at_offset_with_descend::(syntax, offset).map(|name_like| { + let kind = name_like.syntax().kind(); + let range = sema + .original_range_opt(name_like.syntax()) + .ok_or_else(|| format_err!("No references found at position"))?; let res = match &name_like { // renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet ast::NameLike::Name(name) @@ -163,7 +178,6 @@ fn find_definitions( Definition::Local(local_def) } }) - .map(|def| (name_like.clone(), def)) .ok_or_else(|| format_err!("No references found 
at position")), ast::NameLike::NameRef(name_ref) => { NameRefClass::classify(sema, name_ref) @@ -187,7 +201,7 @@ fn find_definitions( { Err(format_err!("Renaming aliases is currently unsupported")) } else { - Ok((name_like.clone(), def)) + Ok(def) } }) } @@ -203,11 +217,10 @@ fn find_definitions( _ => None, }) }) - .map(|def| (name_like, def)) .ok_or_else(|| format_err!("No references found at position")) } }; - res + res.map(|def| (range, kind, def)) }); let res: RenameResult> = symbols.collect(); @@ -218,7 +231,7 @@ fn find_definitions( Err(format_err!("No references found at position")) } else { // remove duplicates, comparing `Definition`s - Ok(v.into_iter().unique_by(|t| t.1)) + Ok(v.into_iter().unique_by(|&(.., def)| def).collect::>().into_iter()) } } Err(e) => Err(e), @@ -2663,4 +2676,44 @@ struct A; "error: Cannot rename a non-local definition.", ) } + + #[test] + fn implicit_format_args() { + check( + "fbar", + r#" +//- minicore: fmt +fn test() { + let foo = "foo"; + format_args!("hello {foo} {foo$0} {}", foo); +} +"#, + r#" +fn test() { + let fbar = "foo"; + format_args!("hello {fbar} {fbar} {}", fbar); +} +"#, + ); + } + + #[test] + fn implicit_format_args2() { + check( + "fo", + r#" +//- minicore: fmt +fn test() { + let foo = "foo"; + format_args!("hello {foo} {foo$0} {}", foo); +} +"#, + r#" +fn test() { + let fo = "foo"; + format_args!("hello {fo} {fo} {}", fo); +} +"#, + ); + } } diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 954a364c7870..d487a538bb57 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -9,7 +9,7 @@ use ide_db::{ defs::Definition, documentation::docs_from_attrs, helpers::visit_file_defs, - search::SearchScope, + search::{FileReferenceNode, SearchScope}, FxHashMap, FxHashSet, RootDatabase, SymbolKind, }; use itertools::Itertools; @@ -240,7 +240,7 @@ fn find_related_tests( .flatten(); for ref_ in defs { let name_ref = match ref_.name { - ast::NameLike::NameRef(name_ref) => name_ref, + FileReferenceNode::NameRef(name_ref) => name_ref, _ => continue, }; if let Some(fn_def) = diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index c95f9994615d..990376a49659 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -82,7 +82,7 @@ pub(crate) fn signature_help( // if the cursor is sandwiched between two space tokens and the call is unclosed // this prevents us from leaving the CallExpression .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; - let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token); for node in token.parent_ancestors() { match_ast! { diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index b2db6bb5c43f..ac14afa13208 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -399,7 +399,6 @@ fn traverse( Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, }, token, - 0.into(), ); match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index b51d7575a113..28b39b4f1eec 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -35,6 +35,7 @@ impl SpanMap { /// /// Note this does a linear search through the entire backing vector. 
pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { + // FIXME: This should ignore the syntax context! self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { if s != span { return None; diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 8cc271d226c4..d5d565a015a0 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -121,6 +121,7 @@ impl ast::Whitespace { } } +#[derive(Debug)] pub struct QuoteOffsets { pub quotes: (TextRange, TextRange), pub contents: TextRange, @@ -167,6 +168,11 @@ pub trait IsString: AstToken { fn text_range_between_quotes(&self) -> Option { self.quote_offsets().map(|it| it.contents) } + fn text_without_quotes(&self) -> &str { + let text = self.text(); + let Some(offsets) = self.text_range_between_quotes() else { return text }; + &text[offsets - self.syntax().text_range().start()] + } fn open_quote_text_range(&self) -> Option { self.quote_offsets().map(|it| it.quotes.0) } diff --git a/crates/syntax/src/token_text.rs b/crates/syntax/src/token_text.rs index 09c080c0c230..e69deb49ce14 100644 --- a/crates/syntax/src/token_text.rs +++ b/crates/syntax/src/token_text.rs @@ -13,7 +13,7 @@ pub(crate) enum Repr<'a> { } impl<'a> TokenText<'a> { - pub(crate) fn borrowed(text: &'a str) -> Self { + pub fn borrowed(text: &'a str) -> Self { TokenText(Repr::Borrowed(text)) } From fe0a85ca29fdb9614f10a9aa2ef52fd878d79045 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 16:30:57 +0100 Subject: [PATCH 3/5] Resolve implicit format args in syntax highlighting --- crates/hir/src/semantics.rs | 45 ++++++++++++++----- crates/hir/src/source_analyzer.rs | 23 ++++++++++ crates/ide/src/syntax_highlighting.rs | 2 +- crates/ide/src/syntax_highlighting/format.rs | 20 ++++++++- .../ide/src/syntax_highlighting/highlight.rs | 2 +- .../test_data/highlight_macros.html | 10 +---- .../test_data/highlight_strings.html | 43 +++++++----------- crates/ide/src/syntax_highlighting/tests.rs | 36 +++++---------- crates/test-utils/src/minicore.rs | 29 ++++++++++++ 9 files changed, 136 insertions(+), 74 deletions(-) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index b0e0f969d98f..d2fd63428b43 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -403,17 +403,29 @@ impl<'db> SemanticsImpl<'db> { ) } - pub fn resolve_offset_in_format_args( + pub fn as_format_args_parts( &self, - string: ast::String, - offset: TextSize, - ) -> Option<(TextRange, Option)> { - debug_assert!(offset <= string.syntax().text_range().len()); - let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; - let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; - let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; - let format_args = self.wrap_node_infile(format_args); - source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) + string: &ast::String, + ) -> Option)>> { + if let Some(quote) = string.open_quote_text_range() { + return self + .descend_into_macros(DescendPreference::SameText, string.syntax().clone()) + .into_iter() + .find_map(|token| { + let string = ast::String::cast(token)?; + let literal = + string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + let res = source_analyzer + 
.as_format_args_parts(self.db, format_args.as_ref())? + .map(|(range, res)| (range + quote.end(), res)) + .collect(); + Some(res) + }); + } + None } pub fn check_for_format_args_template( @@ -438,6 +450,19 @@ impl<'db> SemanticsImpl<'db> { None } + fn resolve_offset_in_format_args( + &self, + string: ast::String, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + debug_assert!(offset <= string.syntax().text_range().len()); + let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) + } + /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 9fdee209cfbc..8afa7e065947 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -843,6 +843,29 @@ impl SourceAnalyzer { }) } + pub(crate) fn as_format_args_parts<'a>( + &'a self, + db: &'a dyn HirDatabase, + format_args: InFile<&ast::FormatArgsExpr>, + ) -> Option)> + 'a> { + Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map( + move |(range, name)| { + ( + *range, + resolve_hir_value_path( + db, + &self.resolver, + self.resolver.body_owner(), + &Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + Some(name.clone()), + )), + ), + ) + }, + )) + } + fn resolve_impl_method_or_trait_def( &self, db: &dyn HirDatabase, diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index ac14afa13208..45582f54b46c 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -440,7 +440,7 @@ fn traverse( { continue; } - highlight_format_string(hl, &string, &expanded_string, range); + highlight_format_string(hl, sema, krate, &string, &expanded_string, range); if !string.is_raw() { highlight_escape_string(hl, &string, range.start()); diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs index 2ef1315945a0..518e71454798 100644 --- a/crates/ide/src/syntax_highlighting/format.rs +++ b/crates/ide/src/syntax_highlighting/format.rs @@ -1,14 +1,20 @@ //! Syntax highlighting for format macro strings. 
use ide_db::{ + defs::Definition, syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier}, SymbolKind, }; use syntax::{ast, TextRange}; -use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag}; +use crate::{ + syntax_highlighting::{highlight::highlight_def, highlights::Highlights}, + HlRange, HlTag, +}; pub(super) fn highlight_format_string( stack: &mut Highlights, + sema: &hir::Semantics<'_, ide_db::RootDatabase>, + krate: hir::Crate, string: &ast::String, expanded_string: &ast::String, range: TextRange, @@ -27,6 +33,18 @@ pub(super) fn highlight_format_string( }); } }); + + if let Some(parts) = sema.as_format_args_parts(string) { + parts.into_iter().for_each(|(range, res)| { + if let Some(res) = res { + stack.add(HlRange { + range, + highlight: highlight_def(sema, krate, Definition::from(res)), + binding_hash: None, + }) + } + }) + } } fn highlight_format_specifier(kind: FormatSpecifier) -> Option { diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 1bffab29cf93..d510c11c3d5a 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -348,7 +348,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 { hash((name, shadow_count)) } -fn highlight_def( +pub(super) fn highlight_def( sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, def: Definition, diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 06b66b302ae0..b8d38a60fc01 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -90,17 +90,11 @@ } } -#[rustc_builtin_macro] -macro_rules! concat {} -#[rustc_builtin_macro] -macro_rules! include {} -#[rustc_builtin_macro] -macro_rules! format_args {} -include!(concat!("foo/", "foo.rs")); +include!(concat!("foo/", "foo.rs")); fn main() { - format_args!("Hello, {}!", 92); + format_args!("Hello, {}!", 92); dont_color_me_braces!(); noop!(noop!(1)); } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index aa79cd9b829f..b40295684df9 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -48,42 +48,29 @@ $crate::io::_print(format_args_nl!($($arg)*)); }) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args_nl {} mod panic { pub macro panic_2015 { () => ( - $crate::panicking::panic("explicit panic") + panic("explicit panic") ), ($msg:literal $(,)?) => ( - $crate::panicking::panic($msg) + panic($msg) ), // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint. ($msg:expr $(,)?) => ( - $crate::panicking::panic_str($msg) + panic_str($msg) ), // Special-case the single-argument case for const_panic. ("{}", $arg:expr $(,)?) => ( - $crate::panicking::panic_display(&$arg) + panic_display(&$arg) ), ($fmt:expr, $($arg:tt)+) => ( - $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+)) + panic_fmt(const_format_args!($fmt, $($arg)+)) ), } } -#[rustc_builtin_macro(std_panic)] -#[macro_export] -macro_rules! panic {} -#[rustc_builtin_macro] -macro_rules! assert {} -#[rustc_builtin_macro] -macro_rules! asm {} -#[rustc_builtin_macro] -macro_rules! concat {} - macro_rules! 
toho { () => ($crate::panic!("not yet implemented")); ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); @@ -165,20 +152,22 @@ println!("{ничоси}", ничоси = 92); println!("{:x?} {} ", thingy, n2); - panic!("{}", 0); - panic!("more {}", 1); - assert!(true, "{}", 1); - assert!(true, "{} asdasd", 1); + panic!("{}", 0); + panic!("more {}", 1); + assert!(true, "{}", 1); + assert!(true, "{} asdasd", 1); toho!("{}fmt", 0); let i: u64 = 3; let o: u64; - asm!( - "mov {0}, {1}", - "add {0}, 5", + asm!( + "mov {0}, {1}", + "add {0}, 5", out(reg) o, in(reg) i, ); - format_args!(concat!("{}"), "{}"); - format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + const CONSTANT: () = (): + let mut m = (); + format_args!(concat!("{}"), "{}"); + format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 542d8992531f..935b6b2cb9b1 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -48,6 +48,7 @@ fn macros() { check_highlighting( r#" //- proc_macros: mirror +//- minicore: fmt, include, concat //- /lib.rs crate:lib proc_macros::mirror! { { @@ -96,12 +97,6 @@ macro without_args { } } -#[rustc_builtin_macro] -macro_rules! concat {} -#[rustc_builtin_macro] -macro_rules! include {} -#[rustc_builtin_macro] -macro_rules! format_args {} include!(concat!("foo/", "foo.rs")); @@ -401,48 +396,35 @@ fn test_string_highlighting() { // thus, we have to copy the macro definition from `std` check_highlighting( r#" -//- minicore: fmt +//- minicore: fmt, assert, asm, concat, panic macro_rules! println { ($($arg:tt)*) => ({ $crate::io::_print(format_args_nl!($($arg)*)); }) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args_nl {} mod panic { pub macro panic_2015 { () => ( - $crate::panicking::panic("explicit panic") + panic("explicit panic") ), ($msg:literal $(,)?) => ( - $crate::panicking::panic($msg) + panic($msg) ), // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint. ($msg:expr $(,)?) => ( - $crate::panicking::panic_str($msg) + panic_str($msg) ), // Special-case the single-argument case for const_panic. ("{}", $arg:expr $(,)?) => ( - $crate::panicking::panic_display(&$arg) + panic_display(&$arg) ), ($fmt:expr, $($arg:tt)+) => ( - $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+)) + panic_fmt(const_format_args!($fmt, $($arg)+)) ), } } -#[rustc_builtin_macro(std_panic)] -#[macro_export] -macro_rules! panic {} -#[rustc_builtin_macro] -macro_rules! assert {} -#[rustc_builtin_macro] -macro_rules! asm {} -#[rustc_builtin_macro] -macro_rules! concat {} - macro_rules! 
toho { () => ($crate::panic!("not yet implemented")); ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); @@ -538,8 +520,10 @@ fn main() { in(reg) i, ); + const CONSTANT: () = (): + let mut m = (); format_args!(concat!("{}"), "{}"); - format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); }"#, expect_file!["./test_data/highlight_strings.html"], false, diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index d39d62f3620d..f766747d707c 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -9,6 +9,8 @@ //! //! Available flags: //! add: +//! asm: +//! assert: //! as_ref: sized //! bool_impl: option, fn //! builtin_impls: @@ -1366,6 +1368,26 @@ mod macros { } // endregion:panic + // region:asm + #[macro_export] + #[rustc_builtin_macro] + macro_rules! asm { + ($($arg:tt)*) => { + /* compiler built-in */ + }; + } + // endregion:asm + + // region:assert + #[macro_export] + #[rustc_builtin_macro] + macro_rules! assert { + ($($arg:tt)*) => { + /* compiler built-in */ + }; + } + // endregion:assert + // region:fmt #[macro_export] #[rustc_builtin_macro] @@ -1381,6 +1403,13 @@ mod macros { ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; } + #[macro_export] + #[rustc_builtin_macro] + macro_rules! format_args_nl { + ($fmt:expr) => {{ /* compiler built-in */ }}; + ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; + } + #[macro_export] macro_rules! print { ($($arg:tt)*) => {{ From 4525787ed5275c036260340116f0408344a44c74 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 16:32:49 +0100 Subject: [PATCH 4/5] Add test for implicit format args support through nested macro call --- crates/ide/src/hover/tests.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 53585624b68a..8c9d58671e69 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -6673,3 +6673,28 @@ format_args!(r"{$0aaaaa}"); "#]], ); } + +#[test] +fn format_args_implicit_nested() { + check( + r#" +//- minicore: fmt +macro_rules! 
foo { + ($($tt:tt)*) => { + format_args!($($tt)*) + } +} +fn test() { +let aaaaa = "foo"; +foo!(r"{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} From 9b7ec5e31be75ed49b2cc7f341fd93ca6999a303 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 17:04:59 +0100 Subject: [PATCH 5/5] Ignore strings in token trees in syntax highlighting --- crates/hir/src/semantics.rs | 2 +- crates/ide/src/syntax_highlighting.rs | 26 ++++++++++++++----- .../test_data/highlight_strings.html | 5 ++++ crates/ide/src/syntax_highlighting/tests.rs | 5 ++++ 4 files changed, 30 insertions(+), 8 deletions(-) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index d2fd63428b43..ac70c27785c5 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -606,8 +606,8 @@ impl<'db> SemanticsImpl<'db> { } Dp::None => true, }; + res = value; if is_a_match { - res = value; ControlFlow::Break(()) } else { ControlFlow::Continue(()) diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 45582f54b46c..366a3c969f93 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -393,13 +393,25 @@ fn traverse( // Attempt to descend tokens into macro-calls. let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { - let token = sema.descend_into_macros_single( - match attr_or_derive_item { - Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind, - Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, - }, - token, - ); + let token = if token.kind() == STRING { + // for strings, try to prefer a string that has not been lost in a token + // tree + // FIXME: This should be done for everything, but check perf first + sema.descend_into_macros(DescendPreference::SameKind, token) + .into_iter() + .max_by_key(|it| { + it.parent().map_or(false, |it| it.kind() != TOKEN_TREE) + }) + .unwrap() + } else { + sema.descend_into_macros_single( + match attr_or_derive_item { + Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind, + Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, + }, + token, + ) + }; match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes Some(parent) => match (token.kind(), parent.syntax().kind()) { diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index b40295684df9..75cb6223e0e6 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -76,6 +76,10 @@ ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); } +macro_rules! 
reuse_twice { + ($literal:literal) => {{stringify!($literal); format_args!($literal)}}; +} + fn main() { let a = '\n'; let a = '\t'; @@ -170,4 +174,5 @@ let mut m = (); format_args!(concat!("{}"), "{}"); format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + reuse_twice!("{backslash}"); } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 935b6b2cb9b1..fcfd3c92571b 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -430,6 +430,10 @@ macro_rules! toho { ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); } +macro_rules! reuse_twice { + ($literal:literal) => {{stringify!($literal); format_args!($literal)}}; +} + fn main() { let a = '\n'; let a = '\t'; @@ -524,6 +528,7 @@ fn main() { let mut m = (); format_args!(concat!("{}"), "{}"); format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + reuse_twice!("{backslash}"); }"#, expect_file!["./test_data/highlight_strings.html"], false,