diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 9f89bcf9c3d0..54f35cf0a6ee 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1388,9 +1388,13 @@ impl Function { let loc = self.id.lookup(db.upcast()); let krate = loc.krate(db); let def_map = db.crate_def_map(krate.into()); - let name = &function_data.name; + let ast_id = + InFile::new(loc.id.file_id(), loc.id.item_tree(db.upcast())[loc.id.value].ast_id); + let mut exported_proc_macros = def_map.exported_proc_macros(); - exported_proc_macros.find(|(_, mac_name)| mac_name == name).map(|(id, _)| MacroDef { id }) + exported_proc_macros + .find(|&(id, _)| matches!(id.kind, MacroDefKind::ProcMacro(_, _, id) if id == ast_id)) + .map(|(id, _)| MacroDef { id }) } /// A textual representation of the HIR of this function for debugging purposes. @@ -2436,7 +2440,7 @@ impl Impl { #[derive(Clone, PartialEq, Eq, Debug)] pub struct Type { - krate: CrateId, + krate: CrateId, // FIXME this is probably redundant with the TraitEnvironment env: Arc, ty: Ty, } @@ -2503,6 +2507,10 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Ref(..)) } + pub fn is_slice(&self) -> bool { + matches!(self.ty.kind(Interner), TyKind::Slice(..)) + } + pub fn is_usize(&self) -> bool { matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize))) } @@ -2525,12 +2533,9 @@ impl Type { /// Checks that particular type `ty` implements `std::future::Future`. /// This function is used in `.await` syntax completion. pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { - // No special case for the type of async block, since Chalk can figure it out. - - let krate = self.krate; - - let std_future_trait = - db.lang_item(krate, SmolStr::new_inline("future_trait")).and_then(|it| it.as_trait()); + let std_future_trait = db + .lang_item(self.krate, SmolStr::new_inline("future_trait")) + .and_then(|it| it.as_trait()); let std_future_trait = match std_future_trait { Some(it) => it, None => return false, @@ -2538,13 +2543,7 @@ impl Type { let canonical_ty = Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) }; - method_resolution::implements_trait( - &canonical_ty, - db, - self.env.clone(), - krate, - std_future_trait, - ) + method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), std_future_trait) } /// Checks that particular type `ty` implements `std::ops::FnOnce`. @@ -2552,9 +2551,7 @@ impl Type { /// This function can be used to check if a particular type is callable, since FnOnce is a /// supertrait of Fn and FnMut, so all callable types implements at least FnOnce. 
pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool { - let krate = self.krate; - - let fnonce_trait = match FnTrait::FnOnce.get_id(db, krate) { + let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.krate) { Some(it) => it, None => return false, }; @@ -2565,7 +2562,6 @@ impl Type { &canonical_ty, db, self.env.clone(), - krate, fnonce_trait, ) } @@ -2736,9 +2732,8 @@ impl Type { pub fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator + 'a { // There should be no inference vars in types passed here let canonical = hir_ty::replace_errors_with_variables(&self.ty); - let environment = self.env.env.clone(); - let ty = InEnvironment { goal: canonical, environment }; - autoderef(db, Some(self.krate), ty).map(|canonical| canonical.value) + let environment = self.env.clone(); + autoderef(db, environment, canonical).map(|canonical| canonical.value) } // This would be nicer if it just returned an iterator, but that runs into @@ -2793,24 +2788,26 @@ impl Type { pub fn iterate_method_candidates( &self, db: &dyn HirDatabase, - krate: Crate, + scope: &SemanticsScope, + // FIXME this can be retrieved from `scope`, except autoimport uses this + // to specify a different set, so the method needs to be split traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - mut callback: impl FnMut(Type, Function) -> Option, + mut callback: impl FnMut(Function) -> Option, ) -> Option { let _p = profile::span("iterate_method_candidates"); let mut slot = None; self.iterate_method_candidates_dyn( db, - krate, + scope, traits_in_scope, with_local_impls, name, - &mut |ty, assoc_item_id| { + &mut |assoc_item_id| { if let AssocItemId::FunctionId(func) = assoc_item_id { - if let Some(res) = callback(self.derived(ty.clone()), func.into()) { + if let Some(res) = callback(func.into()) { slot = Some(res); return ControlFlow::Break(()); } @@ -2824,50 +2821,55 @@ impl Type { fn iterate_method_candidates_dyn( &self, db: &dyn HirDatabase, - krate: Crate, + scope: &SemanticsScope, traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - callback: &mut dyn FnMut(&Ty, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, ) { // There should be no inference vars in types passed here let canonical = hir_ty::replace_errors_with_variables(&self.ty); - let env = self.env.clone(); - let krate = krate.id; + let krate = match scope.krate() { + Some(k) => k, + None => return, + }; + let environment = scope.resolver().generic_def().map_or_else( + || Arc::new(TraitEnvironment::empty(krate.id)), + |d| db.trait_environment(d), + ); method_resolution::iterate_method_candidates_dyn( &canonical, db, - env, - krate, + environment, traits_in_scope, with_local_impls.and_then(|b| b.id.containing_block()).into(), name, method_resolution::LookupMode::MethodCall, - &mut |ty, id| callback(&ty.value, id), + &mut |_adj, id| callback(id), ); } pub fn iterate_path_candidates( &self, db: &dyn HirDatabase, - krate: Crate, + scope: &SemanticsScope, traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - mut callback: impl FnMut(Type, AssocItem) -> Option, + mut callback: impl FnMut(AssocItem) -> Option, ) -> Option { let _p = profile::span("iterate_path_candidates"); let mut slot = None; self.iterate_path_candidates_dyn( db, - krate, + scope, traits_in_scope, with_local_impls, name, - &mut |ty, assoc_item_id| { - if let Some(res) = callback(self.derived(ty.clone()), assoc_item_id.into()) { + &mut |assoc_item_id| { + if let Some(res) = 
callback(assoc_item_id.into()) { slot = Some(res); return ControlFlow::Break(()); } @@ -2880,27 +2882,31 @@ impl Type { fn iterate_path_candidates_dyn( &self, db: &dyn HirDatabase, - krate: Crate, + scope: &SemanticsScope, traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - callback: &mut dyn FnMut(&Ty, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, ) { let canonical = hir_ty::replace_errors_with_variables(&self.ty); - let env = self.env.clone(); - let krate = krate.id; + let krate = match scope.krate() { + Some(k) => k, + None => return, + }; + let environment = scope.resolver().generic_def().map_or_else( + || Arc::new(TraitEnvironment::empty(krate.id)), + |d| db.trait_environment(d), + ); - method_resolution::iterate_method_candidates_dyn( + method_resolution::iterate_path_candidates( &canonical, db, - env, - krate, + environment, traits_in_scope, with_local_impls.and_then(|b| b.id.containing_block()).into(), name, - method_resolution::LookupMode::Path, - &mut |ty, id| callback(&ty.value, id), + &mut |id| callback(id), ); } diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 20c360e302ee..2e0dbf82b777 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -1230,6 +1230,10 @@ impl<'a> SemanticsScope<'a> { Some(Crate { id: self.resolver.krate()? }) } + pub(crate) fn resolver(&self) -> &Resolver { + &self.resolver + } + /// Note: `FxHashSet` should be treated as an opaque type, passed into `Type pub fn visible_traits(&self) -> FxHashSet { let resolver = &self.resolver; diff --git a/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs index 6e91301ecdc8..919dd6c07f03 100644 --- a/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -313,6 +313,24 @@ fn main() { "foor0bar\nfalse"; } ); } +#[test] +fn test_concat_bytes_expand() { + check( + r##" +#[rustc_builtin_macro] +macro_rules! concat_bytes {} + +fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); } +"##, + expect![[r##" +#[rustc_builtin_macro] +macro_rules! 
concat_bytes {} + +fn main() { [b'A', 66, 67, 68, b'E', 70]; } +"##]], + ); +} + #[test] fn test_concat_with_captured_expr() { check( diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs index 2dd7cc485969..9176c90ae9c0 100644 --- a/crates/hir_def/src/nameres/collector.rs +++ b/crates/hir_def/src/nameres/collector.rs @@ -1417,13 +1417,6 @@ impl ModCollector<'_, '_> { } fn collect(&mut self, items: &[ModItem], container: ItemContainerId) { - struct DefData<'a> { - id: ModuleDefId, - name: &'a Name, - visibility: &'a RawVisibility, - has_constructor: bool, - } - let krate = self.def_collector.def_map.krate; // Note: don't assert that inserted value is fresh: it's simply not true @@ -1473,22 +1466,37 @@ impl ModCollector<'_, '_> { continue; } + let db = self.def_collector.db; let module = self.def_collector.def_map.module_id(self.module_id); + let def_map = &mut self.def_collector.def_map; + let update_def = + |def_collector: &mut DefCollector, id, name: &Name, vis, has_constructor| { + def_collector.def_map.modules[self.module_id].scope.declare(id); + def_collector.update( + self.module_id, + &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))], + vis, + ImportType::Named, + ) + }; + let resolve_vis = |def_map: &DefMap, visibility| { + def_map + .resolve_visibility(db, self.module_id, visibility) + .unwrap_or(Visibility::Public) + }; - let mut def = None; match item { ModItem::Mod(m) => self.collect_module(&self.item_tree[m], &attrs), ModItem::Import(import_id) => { - let module_id = self.module_id; let imports = Import::from_use( - self.def_collector.db, + db, krate, self.item_tree, ItemTreeId::new(self.tree_id, import_id), ); self.def_collector.unresolved_imports.extend(imports.into_iter().map( |import| ImportDirective { - module_id, + module_id: self.module_id, import, status: PartialResolvedImport::Unresolved, }, @@ -1498,7 +1506,7 @@ impl ModCollector<'_, '_> { self.def_collector.unresolved_imports.push(ImportDirective { module_id: self.module_id, import: Import::from_extern_crate( - self.def_collector.db, + db, krate, self.item_tree, ItemTreeId::new(self.tree_id, import_id), @@ -1513,7 +1521,7 @@ impl ModCollector<'_, '_> { container: module, id: ItemTreeId::new(self.tree_id, block), } - .intern(self.def_collector.db), + .intern(db), ), ), ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container), @@ -1523,73 +1531,84 @@ impl ModCollector<'_, '_> { let module = self.def_collector.def_map.module_id(self.module_id); let impl_id = ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) } - .intern(self.def_collector.db); + .intern(db); self.def_collector.def_map.modules[self.module_id].scope.define_impl(impl_id) } ModItem::Function(id) => { - let func = &self.item_tree[id]; - - let ast_id = InFile::new(self.file_id(), func.ast_id); - self.collect_proc_macro_def(&func.name, ast_id, &attrs); + let it = &self.item_tree[id]; - def = Some(DefData { - id: FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let is_proc_macro = attrs.parse_proc_macro_decl(&it.name); + let vis = match is_proc_macro { + Some(proc_macro) => { + // FIXME: this should only be done in the root module of `proc-macro` crates, not everywhere + let ast_id = InFile::new(self.tree_id.file_id(), it.ast_id); + let module_id = def_map.module_id(def_map.root()); + self.def_collector.export_proc_macro(proc_macro, ast_id); + Visibility::Module(module_id) + } + None => resolve_vis(def_map, 
&self.item_tree[it.visibility]), + }; + update_def( + self.def_collector, + FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &func.name, - visibility: &self.item_tree[func.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::Struct(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: !matches!(it.fields, Fields::Record(_)), - }); + &it.name, + vis, + !matches!(it.fields, Fields::Record(_)), + ); } ModItem::Union(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::Enum(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::Const(id) => { let it = &self.item_tree[id]; - let const_id = ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db); + let const_id = + ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); match &it.name { Some(name) => { - def = Some(DefData { - id: const_id.into(), - name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def(self.def_collector, const_id.into(), name, vis, false); } None => { // const _: T = ...; @@ -1602,55 +1621,46 @@ impl ModCollector<'_, '_> { ModItem::Static(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::Trait(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + 
false, + ); } ModItem::TypeAlias(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } } - - if let Some(DefData { id, name, visibility, has_constructor }) = def { - self.def_collector.def_map.modules[self.module_id].scope.declare(id); - let vis = self - .def_collector - .def_map - .resolve_visibility(self.def_collector.db, self.module_id, visibility) - .unwrap_or(Visibility::Public); - self.def_collector.update( - self.module_id, - &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))], - vis, - ImportType::Named, - ) - } } } @@ -1691,8 +1701,9 @@ impl ModCollector<'_, '_> { { Ok((file_id, is_mod_rs, mod_dir)) => { let item_tree = db.file_item_tree(file_id.into()); + let krate = self.def_collector.def_map.krate; let is_enabled = item_tree - .top_level_attrs(db, self.def_collector.def_map.krate) + .top_level_attrs(db, krate) .cfg() .map_or(true, |cfg| self.is_cfg_enabled(&cfg)); if is_enabled { @@ -1703,7 +1714,7 @@ impl ModCollector<'_, '_> { &self.item_tree[module.visibility], ); ModCollector { - def_collector: &mut *self.def_collector, + def_collector: self.def_collector, macro_depth: self.macro_depth, module_id, tree_id: TreeId::new(file_id.into(), None), @@ -1713,7 +1724,7 @@ impl ModCollector<'_, '_> { .collect_in_top_module(item_tree.top_level_items()); let is_macro_use = is_macro_use || item_tree - .top_level_attrs(db, self.def_collector.def_map.krate) + .top_level_attrs(db, krate) .by_key("macro_use") .exists(); if is_macro_use { @@ -1738,12 +1749,11 @@ impl ModCollector<'_, '_> { definition: Option<(FileId, bool)>, visibility: &crate::visibility::RawVisibility, ) -> LocalModuleId { - let vis = self - .def_collector - .def_map + let def_map = &mut self.def_collector.def_map; + let vis = def_map .resolve_visibility(self.def_collector.db, self.module_id, visibility) .unwrap_or(Visibility::Public); - let modules = &mut self.def_collector.def_map.modules; + let modules = &mut def_map.modules; let origin = match definition { None => ModuleOrigin::Inline { definition: declaration }, Some((definition, is_mod_rs)) => { @@ -1758,10 +1768,10 @@ impl ModCollector<'_, '_> { } modules[self.module_id].children.insert(name.clone(), res); - let module = self.def_collector.def_map.module_id(res); + let module = def_map.module_id(res); let def = ModuleDefId::from(module); - self.def_collector.def_map.modules[self.module_id].scope.declare(def); + def_map.modules[self.module_id].scope.declare(def); self.def_collector.update( self.module_id, &[(Some(name), PerNs::from_def(def, vis, false))], @@ -1835,14 +1845,6 @@ impl ModCollector<'_, '_> { Ok(()) } - /// If `attrs` registers a procedural macro, collects its definition. 
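// --- Editor's aside (not part of the patch): the shape of item that the new
// `is_proc_macro` branch above detects via `parse_proc_macro_decl`. A sketch,
// assuming a crate built with `proc-macro = true`; `my_derive` and `MyTrait`
// are hypothetical names. Such functions are now handed to `export_proc_macro`
// and given only module-level visibility instead of being declared like
// ordinary functions.
use proc_macro::TokenStream;

#[proc_macro_derive(MyTrait)]
pub fn my_derive(_input: TokenStream) -> TokenStream {
    // A derive's output is appended after the annotated item; emit nothing here.
    TokenStream::new()
}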
- fn collect_proc_macro_def(&mut self, func_name: &Name, ast_id: AstId, attrs: &Attrs) { - // FIXME: this should only be done in the root module of `proc-macro` crates, not everywhere - if let Some(proc_macro) = attrs.parse_proc_macro_decl(func_name) { - self.def_collector.export_proc_macro(proc_macro, ast_id); - } - } - fn collect_macro_rules(&mut self, id: FileItemTreeId) { let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[id]; diff --git a/crates/hir_def/src/visibility.rs b/crates/hir_def/src/visibility.rs index 80009010c521..f76034a3e221 100644 --- a/crates/hir_def/src/visibility.rs +++ b/crates/hir_def/src/visibility.rs @@ -1,6 +1,6 @@ //! Defines hir-level representation of visibility (e.g. `pub` and `pub(crate)`). -use std::sync::Arc; +use std::{iter, sync::Arc}; use hir_expand::{hygiene::Hygiene, InFile}; use la_arena::ArenaMap; @@ -25,7 +25,7 @@ pub enum RawVisibility { } impl RawVisibility { - pub(crate) fn private() -> RawVisibility { + pub(crate) const fn private() -> RawVisibility { RawVisibility::Module(ModPath::from_kind(PathKind::Super(0))) } @@ -113,10 +113,7 @@ impl Visibility { } pub(crate) fn is_visible_from_other_crate(self) -> bool { - match self { - Visibility::Module(_) => false, - Visibility::Public => true, - } + matches!(self, Visibility::Public) } pub(crate) fn is_visible_from_def_map( @@ -145,10 +142,7 @@ impl Visibility { arc = to_module.def_map(db); &arc }; - let is_block_root = match to_module.block { - Some(_) => to_module_def_map[to_module.local_id].parent.is_none(), - None => false, - }; + let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none()); if is_block_root { to_module = to_module_def_map.containing_module(to_module.local_id).unwrap(); } @@ -161,9 +155,7 @@ impl Visibility { return true; } match def_map[from_module].parent { - Some(parent) => { - from_module = parent; - } + Some(parent) => from_module = parent, None => { match def_map.parent() { Some(module) => { @@ -171,10 +163,8 @@ impl Visibility { def_map = &*parent_arc; from_module = module.local_id; } - None => { - // Reached the root module, nothing left to check. - return false; - } + // Reached the root module, nothing left to check. + None => return false, } } } @@ -194,12 +184,12 @@ impl Visibility { return None; } - let mut a_ancestors = std::iter::successors(Some(mod_a.local_id), |m| { - let parent_id = def_map[*m].parent?; + let mut a_ancestors = iter::successors(Some(mod_a.local_id), |&m| { + let parent_id = def_map[m].parent?; Some(parent_id) }); - let mut b_ancestors = std::iter::successors(Some(mod_b.local_id), |m| { - let parent_id = def_map[*m].parent?; + let mut b_ancestors = iter::successors(Some(mod_b.local_id), |&m| { + let parent_id = def_map[m].parent?; Some(parent_id) }); diff --git a/crates/hir_expand/src/builtin_fn_macro.rs b/crates/hir_expand/src/builtin_fn_macro.rs index 5876be81b458..17f0caca7aca 100644 --- a/crates/hir_expand/src/builtin_fn_macro.rs +++ b/crates/hir_expand/src/builtin_fn_macro.rs @@ -114,6 +114,7 @@ register_builtin! { (cfg, Cfg) => cfg_expand, (core_panic, CorePanic) => panic_expand, (std_panic, StdPanic) => panic_expand, + (unreachable, Unreachable) => unreachable_expand, (log_syntax, LogSyntax) => log_syntax_expand, (trace_macros, TraceMacros) => trace_macros_expand, @@ -121,6 +122,7 @@ register_builtin! 
{ (compile_error, CompileError) => compile_error_expand, (concat, Concat) => concat_expand, (concat_idents, ConcatIdents) => concat_idents_expand, + (concat_bytes, ConcatBytes) => concat_bytes_expand, (include, Include) => include_expand, (include_bytes, IncludeBytes) => include_bytes_expand, (include_str, IncludeStr) => include_str_expand, @@ -353,12 +355,37 @@ fn panic_expand( ExpandResult::ok(call) } +fn unreachable_expand( + db: &dyn AstDatabase, + id: MacroCallId, + tt: &tt::Subtree, +) -> ExpandResult { + let loc: MacroCallLoc = db.lookup_intern_macro_call(id); + // Expand to a macro call `$crate::panic::unreachable_{edition}` + let krate = tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }; + let mut call = if db.crate_graph()[loc.krate].edition == Edition::Edition2021 { + quote!(#krate::panic::unreachable_2021!) + } else { + quote!(#krate::panic::unreachable_2015!) + }; + + // Pass the original arguments + call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); + ExpandResult::ok(call) +} + fn unquote_str(lit: &tt::Literal) -> Option { let lit = ast::make::tokens::literal(&lit.to_string()); let token = ast::String::cast(lit)?; token.value().map(|it| it.into_owned()) } +fn unquote_byte_string(lit: &tt::Literal) -> Option> { + let lit = ast::make::tokens::literal(&lit.to_string()); + let token = ast::ByteString::cast(lit)?; + token.value().map(|it| it.into_owned()) +} + fn compile_error_expand( _db: &dyn AstDatabase, _id: MacroCallId, @@ -422,6 +449,74 @@ fn concat_expand( ExpandResult { value: ExpandedEager::new(quote!(#text)), err } } +fn concat_bytes_expand( + _db: &dyn AstDatabase, + _arg_id: MacroCallId, + tt: &tt::Subtree, +) -> ExpandResult { + let mut bytes = Vec::new(); + let mut err = None; + for (i, t) in tt.token_trees.iter().enumerate() { + match t { + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + let token = ast::make::tokens::literal(&lit.to_string()); + match token.kind() { + syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()), + syntax::SyntaxKind::BYTE_STRING => { + let components = unquote_byte_string(lit).unwrap_or_else(|| Vec::new()); + components.into_iter().for_each(|x| bytes.push(x.to_string())); + } + _ => { + err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + break; + } + } + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), + tt::TokenTree::Subtree(tree) + if tree.delimiter_kind() == Some(tt::DelimiterKind::Bracket) => + { + if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) { + err.get_or_insert(e); + break; + } + } + _ => { + err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + break; + } + } + } + let ident = tt::Ident { text: bytes.join(", ").into(), id: tt::TokenId::unspecified() }; + ExpandResult { value: ExpandedEager::new(quote!([#ident])), err } +} + +fn concat_bytes_expand_subtree( + tree: &tt::Subtree, + bytes: &mut Vec, +) -> Result<(), ExpandError> { + for (ti, tt) in tree.token_trees.iter().enumerate() { + match tt { + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + let lit = ast::make::tokens::literal(&lit.to_string()); + match lit.kind() { + syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => { + bytes.push(lit.text().to_string()) + } + _ => { + return Err(mbe::ExpandError::UnexpectedToken.into()); + } + } + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (), + _ => { + return Err(mbe::ExpandError::UnexpectedToken.into()); + } + } + } + Ok(()) +} + fn concat_idents_expand( _db: &dyn 
AstDatabase, _arg_id: MacroCallId, diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs index c36bd09e2b3b..f62c3ca0a004 100644 --- a/crates/hir_expand/src/name.rs +++ b/crates/hir_expand/src/name.rs @@ -233,6 +233,7 @@ pub mod known { column, compile_error, concat_idents, + concat_bytes, concat, const_format_args, core_panic, @@ -252,6 +253,7 @@ pub mod known { std_panic, stringify, trace_macros, + unreachable, // Builtin derives Copy, Clone, diff --git a/crates/hir_ty/src/autoderef.rs b/crates/hir_ty/src/autoderef.rs index 6266554ecf67..dffb36b5de31 100644 --- a/crates/hir_ty/src/autoderef.rs +++ b/crates/hir_ty/src/autoderef.rs @@ -3,20 +3,16 @@ //! reference to a type with the field `bar`. This is an approximation of the //! logic in rustc (which lives in librustc_typeck/check/autoderef.rs). -use std::iter::successors; +use std::sync::Arc; -use base_db::CrateId; -use chalk_ir::{cast::Cast, fold::Fold, interner::HasInterner, VariableKind}; -use hir_def::lang_item::LangItemTarget; +use chalk_ir::cast::Cast; use hir_expand::name::name; use limit::Limit; use syntax::SmolStr; -use tracing::{info, warn}; use crate::{ - db::HirDatabase, static_lifetime, AliasEq, AliasTy, BoundVar, Canonical, CanonicalVarKinds, - ConstrainedSubst, DebruijnIndex, Environment, Guidance, InEnvironment, Interner, - ProjectionTyExt, Solution, Substitution, Ty, TyBuilder, TyKind, + db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt, + TraitEnvironment, Ty, TyBuilder, TyKind, }; static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10); @@ -26,40 +22,34 @@ pub(crate) enum AutoderefKind { Overloaded, } -pub(crate) struct Autoderef<'db> { - db: &'db dyn HirDatabase, - ty: Canonical, +pub(crate) struct Autoderef<'a, 'db> { + pub(crate) table: &'a mut InferenceTable<'db>, + ty: Ty, at_start: bool, - krate: Option, - environment: Environment, steps: Vec<(AutoderefKind, Ty)>, } -impl<'db> Autoderef<'db> { - pub(crate) fn new( - db: &'db dyn HirDatabase, - krate: Option, - ty: InEnvironment>, - ) -> Self { - let InEnvironment { goal: ty, environment } = ty; - Autoderef { db, ty, at_start: true, environment, krate, steps: Vec::new() } +impl<'a, 'db> Autoderef<'a, 'db> { + pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self { + let ty = table.resolve_ty_shallow(&ty); + Autoderef { table, ty, at_start: true, steps: Vec::new() } } pub(crate) fn step_count(&self) -> usize { self.steps.len() } - pub(crate) fn steps(&self) -> &[(AutoderefKind, chalk_ir::Ty)] { + pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] { &self.steps } pub(crate) fn final_ty(&self) -> Ty { - self.ty.value.clone() + self.ty.clone() } } -impl Iterator for Autoderef<'_> { - type Item = (Canonical, usize); +impl Iterator for Autoderef<'_, '_> { + type Item = (Ty, usize); fn next(&mut self) -> Option { if self.at_start { @@ -71,54 +61,42 @@ impl Iterator for Autoderef<'_> { return None; } - let (kind, new_ty) = if let Some(derefed) = builtin_deref(&self.ty.value) { - ( - AutoderefKind::Builtin, - Canonical { value: derefed.clone(), binders: self.ty.binders.clone() }, - ) - } else { - ( - AutoderefKind::Overloaded, - deref_by_trait( - self.db, - self.krate?, - InEnvironment { goal: &self.ty, environment: self.environment.clone() }, - )?, - ) - }; + let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?; - self.steps.push((kind, self.ty.value.clone())); + self.steps.push((kind, self.ty.clone())); self.ty = new_ty; Some((self.ty.clone(), self.step_count())) } } +pub(crate) 
fn autoderef_step(table: &mut InferenceTable, ty: Ty) -> Option<(AutoderefKind, Ty)> { + if let Some(derefed) = builtin_deref(&ty) { + Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed))) + } else { + Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?)) + } +} + // FIXME: replace uses of this with Autoderef above pub fn autoderef<'a>( db: &'a dyn HirDatabase, - krate: Option, - ty: InEnvironment>, + env: Arc, + ty: Canonical, ) -> impl Iterator> + 'a { - let InEnvironment { goal: ty, environment } = ty; - successors(Some(ty), move |ty| { - deref(db, krate?, InEnvironment { goal: ty, environment: environment.clone() }) - }) - .take(AUTODEREF_RECURSION_LIMIT.inner()) + let mut table = InferenceTable::new(db, env); + let ty = table.instantiate_canonical(ty); + let mut autoderef = Autoderef::new(&mut table, ty); + let mut v = Vec::new(); + while let Some((ty, _steps)) = autoderef.next() { + v.push(autoderef.table.canonicalize(ty).value); + } + v.into_iter() } -pub(crate) fn deref( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment<&Canonical>, -) -> Option> { +pub(crate) fn deref(table: &mut InferenceTable, ty: Ty) -> Option { let _p = profile::span("deref"); - match builtin_deref(&ty.goal.value) { - Some(derefed) => { - Some(Canonical { value: derefed.clone(), binders: ty.goal.binders.clone() }) - } - None => deref_by_trait(db, krate, ty), - } + autoderef_step(table, ty).map(|(_, ty)| ty) } fn builtin_deref(ty: &Ty) -> Option<&Ty> { @@ -129,16 +107,12 @@ fn builtin_deref(ty: &Ty) -> Option<&Ty> { } } -fn deref_by_trait( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment<&Canonical>, -) -> Option> { +fn deref_by_trait(table: &mut InferenceTable, ty: Ty) -> Option { + let db = table.db; let _p = profile::span("deref_by_trait"); - let deref_trait = match db.lang_item(krate, SmolStr::new_inline("deref"))? 
{ - LangItemTarget::TraitId(it) => it, - _ => return None, - }; + let deref_trait = db + .lang_item(table.trait_env.krate, SmolStr::new_inline("deref")) + .and_then(|l| l.as_trait())?; let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; let projection = { @@ -148,114 +122,16 @@ fn deref_by_trait( // namely Deref's Self type return None; } - b.push(ty.goal.value.clone()).build() + b.push(ty).build() }; - // FIXME make the Canonical / bound var handling nicer - // Check that the type implements Deref at all let trait_ref = projection.trait_ref(db); - let implements_goal = Canonical { - binders: ty.goal.binders.clone(), - value: InEnvironment { - goal: trait_ref.cast(Interner), - environment: ty.environment.clone(), - }, - }; - if db.trait_solve(krate, implements_goal).is_none() { - return None; - } + let implements_goal: Goal = trait_ref.cast(Interner); + table.try_obligation(implements_goal.clone())?; - // Now do the assoc type projection - let alias_eq = AliasEq { - alias: AliasTy::Projection(projection), - ty: TyKind::BoundVar(BoundVar::new( - DebruijnIndex::INNERMOST, - ty.goal.binders.len(Interner), - )) - .intern(Interner), - }; - - let in_env = InEnvironment { goal: alias_eq.cast(Interner), environment: ty.environment }; - - let canonical = Canonical { - value: in_env, - binders: CanonicalVarKinds::from_iter( - Interner, - ty.goal.binders.iter(Interner).cloned().chain(Some(chalk_ir::WithKind::new( - VariableKind::Ty(chalk_ir::TyVariableKind::General), - chalk_ir::UniverseIndex::ROOT, - ))), - ), - }; - - let solution = db.trait_solve(krate, canonical)?; - - match &solution { - Solution::Unique(Canonical { value: ConstrainedSubst { subst, .. }, binders }) - | Solution::Ambig(Guidance::Definite(Canonical { value: subst, binders })) => { - // FIXME: vars may contain solutions for any inference variables - // that happened to be inside ty. To correctly handle these, we - // would have to pass the solution up to the inference context, but - // that requires a larger refactoring (especially if the deref - // happens during method resolution). So for the moment, we just - // check that we're not in the situation where we would actually - // need to handle the values of the additional variables, i.e. - // they're just being 'passed through'. In the 'standard' case where - // we have `impl Deref for Foo { Target = T }`, that should be - // the case. - - // FIXME: if the trait solver decides to truncate the type, these - // assumptions will be broken. We would need to properly introduce - // new variables in that case - - for i in 1..binders.len(Interner) { - if subst.at(Interner, i - 1).assert_ty_ref(Interner).kind(Interner) - != &TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) - { - warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.goal, solution); - return None; - } - } - // FIXME: we remove lifetime variables here since they can confuse - // the method resolution code later - Some(fixup_lifetime_variables(Canonical { - value: subst.at(Interner, subst.len(Interner) - 1).assert_ty_ref(Interner).clone(), - binders: binders.clone(), - })) - } - Solution::Ambig(_) => { - info!("Ambiguous solution for derefing {:?}: {:?}", ty.goal, solution); - None - } - } -} + table.register_obligation(implements_goal); -fn fixup_lifetime_variables + HasInterner>( - c: Canonical, -) -> Canonical { - // Removes lifetime variables from the Canonical, replacing them by static lifetimes. 
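// --- Editor's aside (not part of the patch): what `deref_by_trait` models in
// surface Rust — the `deref` lang item (`std::ops::Deref`) and its `Target`
// associated type. The rewritten version in this hunk checks the `T: Deref`
// obligation with `try_obligation` and then normalizes `<T as Deref>::Target`
// instead of building and matching a canonical goal by hand. A minimal
// runnable sketch of the trait it is resolving against:
use std::ops::Deref;

struct Wrapper(String);

impl Deref for Wrapper {
    type Target = String;
    fn deref(&self) -> &String {
        &self.0
    }
}

fn main() {
    let w = Wrapper("hi".to_string());
    // Autoderef inserts the overloaded deref step: Wrapper -> String -> str.
    assert_eq!(w.len(), 2);
}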
- let mut i = 0; - let subst = Substitution::from_iter( - Interner, - c.binders.iter(Interner).map(|vk| match vk.kind { - VariableKind::Ty(_) => { - let index = i; - i += 1; - BoundVar::new(DebruijnIndex::INNERMOST, index).to_ty(Interner).cast(Interner) - } - VariableKind::Lifetime => static_lifetime().cast(Interner), - VariableKind::Const(_) => unimplemented!(), - }), - ); - let binders = CanonicalVarKinds::from_iter( - Interner, - c.binders.iter(Interner).filter(|vk| match vk.kind { - VariableKind::Ty(_) => true, - VariableKind::Lifetime => false, - VariableKind::Const(_) => true, - }), - ); - let value = subst.apply(c.value, Interner); - Canonical { binders, value } + let result = table.normalize_projection_ty(projection); + Some(table.resolve_ty_shallow(&result)) } diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs index 0e75ddeabcdf..2020834fbc49 100644 --- a/crates/hir_ty/src/display.rs +++ b/crates/hir_ty/src/display.rs @@ -1189,7 +1189,18 @@ impl HirDisplay for Path { write!(f, "super")?; } } - (_, PathKind::DollarCrate(_)) => write!(f, "{{extern_crate}}")?, + (_, PathKind::DollarCrate(id)) => { + // Resolve `$crate` to the crate's display name. + // FIXME: should use the dependency name instead if available, but that depends on + // the crate invoking `HirDisplay` + let crate_graph = f.db.crate_graph(); + let name = crate_graph[*id] + .display_name + .as_ref() + .map(|name| name.canonical_name()) + .unwrap_or("$crate"); + write!(f, "{name}")? + } } for (seg_idx, segment) in self.segments().iter().enumerate() { diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs index 173380654e0c..c84604d69bf8 100644 --- a/crates/hir_ty/src/infer.rs +++ b/crates/hir_ty/src/infer.rs @@ -46,7 +46,7 @@ use crate::{ pub use unify::could_unify; pub(crate) use unify::unify; -mod unify; +pub(crate) mod unify; mod path; mod expr; mod pat; @@ -228,7 +228,7 @@ pub enum Adjust { /// The target type is `U` in both cases, with the region and mutability /// being those shared by both the receiver and the returned reference. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct OverloadedDeref(Mutability); +pub struct OverloadedDeref(pub Mutability); #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum AutoBorrow { @@ -455,10 +455,6 @@ impl<'a> InferenceContext<'a> { self.result.method_resolutions.insert(expr, (func, subst)); } - fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) { - self.result.field_resolutions.insert(expr, field); - } - fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { self.result.variant_resolutions.insert(id, variant); } diff --git a/crates/hir_ty/src/infer/coerce.rs b/crates/hir_ty/src/infer/coerce.rs index bddb79c50159..c24772f29b42 100644 --- a/crates/hir_ty/src/infer/coerce.rs +++ b/crates/hir_ty/src/infer/coerce.rs @@ -259,27 +259,19 @@ impl<'a> InferenceContext<'a> { // details of coercion errors though, so I think it's useful to leave // the structure like it is. 
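// --- Editor's aside (not part of the patch): a self-contained toy model of the
// snapshot/rollback pattern introduced below in `coerce_borrowed_pointer`. The
// types here are hypothetical stand-ins, not the real `InferenceTable` API; the
// point is that speculative autoderef/unification work is rolled back when no
// coercion is found, so failed attempts leave no stray constraints behind.
struct Table {
    constraints: Vec<&'static str>,
}
struct Snapshot(usize);
impl Table {
    fn snapshot(&self) -> Snapshot {
        Snapshot(self.constraints.len())
    }
    fn rollback_to(&mut self, s: Snapshot) {
        self.constraints.truncate(s.0);
    }
}
fn main() {
    let mut table = Table { constraints: vec!["?a == &u32"] };
    let snap = table.snapshot();
    table.constraints.push("?b == [i32]"); // speculative work during coercion
    table.rollback_to(snap); // the coercion attempt failed: discard it
    assert_eq!(table.constraints.len(), 1);
}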
- let canonicalized = self.canonicalize(from_ty.clone()); - let mut autoderef = Autoderef::new( - self.db, - self.resolver.krate(), - InEnvironment { - goal: canonicalized.value.clone(), - environment: self.trait_env.env.clone(), - }, - ); + let snapshot = self.table.snapshot(); + + let mut autoderef = Autoderef::new(&mut self.table, from_ty.clone()); let mut first_error = None; let mut found = None; - for (referent_ty, autoderefs) in autoderef.by_ref() { + while let Some((referent_ty, autoderefs)) = autoderef.next() { if autoderefs == 0 { // Don't let this pass, otherwise it would cause // &T to autoref to &&T. continue; } - let referent_ty = canonicalized.decanonicalize_ty(&mut self.table, referent_ty); - // At this point, we have deref'd `a` to `referent_ty`. So // imagine we are coercing from `&'a mut Vec` to `&'b mut [T]`. // In the autoderef loop for `&'a mut Vec`, we would get @@ -304,7 +296,7 @@ impl<'a> InferenceContext<'a> { // from `&mut T` to `&U`. let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner); - match self.table.try_unify(&derefd_from_ty, to_ty) { + match autoderef.table.try_unify(&derefd_from_ty, to_ty) { Ok(result) => { found = Some(result.map(|()| derefd_from_ty)); break; @@ -325,6 +317,7 @@ impl<'a> InferenceContext<'a> { let InferOk { value: ty, goals } = match found { Some(d) => d, None => { + self.table.rollback_to(snapshot); let err = first_error.expect("coerce_borrowed_pointer had no error"); return Err(err); } @@ -345,29 +338,13 @@ impl<'a> InferenceContext<'a> { return success(vec![], ty, goals); } - let mut adjustments = self.auto_deref_adjust_steps(&autoderef); + let mut adjustments = auto_deref_adjust_steps(&autoderef); adjustments .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() }); success(adjustments, ty, goals) } - pub(super) fn auto_deref_adjust_steps(&self, autoderef: &Autoderef<'_>) -> Vec { - let steps = autoderef.steps(); - let targets = - steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty())); - steps - .iter() - .map(|(kind, _source)| match kind { - // We do not know what kind of deref we require at this point yet - AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)), - AutoderefKind::Builtin => None, - }) - .zip(targets) - .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target }) - .collect() - } - /// Attempts to coerce from the type of a Rust function item into a function pointer. 
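// --- Editor's aside (not part of the patch): the surface behaviour that
// `auto_deref_adjust_steps` (moved to a free function below) records. For this
// call the receiver goes through two builtin deref steps, &&str -> &str -> str
// (an `Adjust::Deref(None)` each), followed by an autoref back to &str so that
// `str::len` applies.
fn main() {
    let s: &&str = &"hello";
    assert_eq!(s.len(), 5);
}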
fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult { match to_ty.kind(Interner) { @@ -620,3 +597,19 @@ fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError (Mutability::Not, Mutability::Mut) => Err(TypeError), } } + +pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec { + let steps = autoderef.steps(); + let targets = + steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty())); + steps + .iter() + .map(|(kind, _source)| match kind { + // We do not know what kind of deref we require at this point yet + AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)), + AutoderefKind::Builtin => None, + }) + .zip(targets) + .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target }) + .collect() +} diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs index 13f64d682521..c305af2e9126 100644 --- a/crates/hir_ty/src/infer/expr.rs +++ b/crates/hir_ty/src/infer/expr.rs @@ -1,6 +1,7 @@ //! Type inference for expressions. use std::{ + collections::hash_map::Entry, iter::{repeat, repeat_with}, mem, sync::Arc, @@ -26,15 +27,14 @@ use crate::{ method_resolution, primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, - traits::FnTrait, utils::{generics, Generics}, - AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, InEnvironment, Interner, - ProjectionTyExt, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, + AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, + Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; use super::{ - find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext, - InferenceDiagnostic, TypeMismatch, + coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges, + Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch, }; impl<'a> InferenceContext<'a> { @@ -77,51 +77,6 @@ impl<'a> InferenceContext<'a> { } } - fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { - let krate = self.resolver.krate()?; - let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; - let output_assoc_type = - self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; - - let mut arg_tys = vec![]; - let arg_ty = TyBuilder::tuple(num_args) - .fill(repeat_with(|| { - let arg = self.table.new_type_var(); - arg_tys.push(arg.clone()); - arg - })) - .build(); - - let projection = { - let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type); - if b.remaining() != 2 { - return None; - } - b.push(ty.clone()).push(arg_ty).build() - }; - - let trait_env = self.trait_env.env.clone(); - let obligation = InEnvironment { - goal: projection.trait_ref(self.db).cast(Interner), - environment: trait_env, - }; - let canonical = self.canonicalize(obligation.clone()); - if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { - self.push_obligation(obligation.goal); - let return_ty = self.table.normalize_projection_ty(projection); - Some((arg_tys, return_ty)) - } else { - None - } - } - - pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { - match ty.callable_sig(self.db) { - Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), - None => self.callable_sig_from_fn_trait(ty, num_args), - } - } - fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { self.db.unwind_if_cancelled(); @@ 
-319,22 +274,19 @@ impl<'a> InferenceContext<'a> { } Expr::Call { callee, args } => { let callee_ty = self.infer_expr(*callee, &Expectation::none()); - let canonicalized = self.canonicalize(callee_ty.clone()); - let mut derefs = Autoderef::new( - self.db, - self.resolver.krate(), - InEnvironment { - goal: canonicalized.value.clone(), - environment: self.table.trait_env.env.clone(), - }, - ); - let res = derefs.by_ref().find_map(|(callee_deref_ty, _)| { - let ty = &canonicalized.decanonicalize_ty(&mut self.table, callee_deref_ty); - self.callable_sig(ty, args.len()) - }); + let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone()); + let mut res = None; + // manual loop to be able to access `derefs.table` + while let Some((callee_deref_ty, _)) = derefs.next() { + res = derefs.table.callable_sig(&callee_deref_ty, args.len()); + if res.is_some() { + break; + } + } let (param_tys, ret_ty): (Vec, Ty) = match res { Some(res) => { - self.write_expr_adj(*callee, self.auto_deref_adjust_steps(&derefs)); + let adjustments = auto_deref_adjust_steps(&derefs); + self.write_expr_adj(*callee, adjustments); res } None => (Vec::new(), self.err_ty()), @@ -489,78 +441,67 @@ impl<'a> InferenceContext<'a> { } Expr::Field { expr, name } => { let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none()); - let canonicalized = self.canonicalize(receiver_ty); - - let mut autoderef = Autoderef::new( - self.db, - self.resolver.krate(), - InEnvironment { - goal: canonicalized.value.clone(), - environment: self.trait_env.env.clone(), - }, - ); + + let mut autoderef = Autoderef::new(&mut self.table, receiver_ty); let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| { - let module = self.resolver.module(); - let db = self.db; - let is_visible = |field_id: &FieldId| { - module - .map(|mod_id| { - db.field_visibilities(field_id.parent)[field_id.local_id] - .is_visible_from(db.upcast(), mod_id) - }) - .unwrap_or(true) - }; - match canonicalized - .decanonicalize_ty(&mut self.table, derefed_ty) - .kind(Interner) - { - TyKind::Tuple(_, substs) => name.as_tuple_index().and_then(|idx| { - substs - .as_slice(Interner) - .get(idx) - .map(|a| a.assert_ty_ref(Interner)) - .cloned() - }), + let (field_id, parameters) = match derefed_ty.kind(Interner) { + TyKind::Tuple(_, substs) => { + return name.as_tuple_index().and_then(|idx| { + substs + .as_slice(Interner) + .get(idx) + .map(|a| a.assert_ty_ref(Interner)) + .cloned() + }); + } TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => { let local_id = self.db.struct_data(*s).variant_data.field(name)?; let field = FieldId { parent: (*s).into(), local_id }; - if is_visible(&field) { - self.write_field_resolution(tgt_expr, field); - Some( - self.db.field_types((*s).into())[field.local_id] - .clone() - .substitute(Interner, ¶meters), - ) - } else { - None - } + (field, parameters.clone()) } TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => { let local_id = self.db.union_data(*u).variant_data.field(name)?; let field = FieldId { parent: (*u).into(), local_id }; - if is_visible(&field) { - self.write_field_resolution(tgt_expr, field); - Some( - self.db.field_types((*u).into())[field.local_id] - .clone() - .substitute(Interner, ¶meters), - ) - } else { - None - } + (field, parameters.clone()) } - _ => None, + _ => return None, + }; + let module = self.resolver.module(); + let is_visible = module + .map(|mod_id| { + self.db.field_visibilities(field_id.parent)[field_id.local_id] + .is_visible_from(self.db.upcast(), mod_id) + }) + .unwrap_or(true); + if 
!is_visible { + // Write down the first field resolution even if it is not visible + // This aids IDE features for private fields like goto def and in + // case of autoderef finding an applicable field, this will be + // overwritten in a following cycle + if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr) + { + entry.insert(field_id); + } + return None; } + // can't have `write_field_resolution` here because `self.table` is borrowed :( + self.result.field_resolutions.insert(tgt_expr, field_id); + let ty = self.db.field_types(field_id.parent)[field_id.local_id] + .clone() + .substitute(Interner, ¶meters); + Some(ty) }); let ty = match ty { Some(ty) => { - self.write_expr_adj(*expr, self.auto_deref_adjust_steps(&autoderef)); + let adjustments = auto_deref_adjust_steps(&autoderef); + self.write_expr_adj(*expr, adjustments); + let ty = self.insert_type_vars(ty); + let ty = self.normalize_associated_types_in(ty); ty } - None => self.err_ty(), + _ => self.err_ty(), }; - let ty = self.insert_type_vars(ty); - self.normalize_associated_types_in(ty) + ty } Expr::Await { expr } => { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); @@ -618,25 +559,9 @@ impl<'a> InferenceContext<'a> { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); let inner_ty = self.resolve_ty_shallow(&inner_ty); match op { - UnaryOp::Deref => match self.resolver.krate() { - Some(krate) => { - let canonicalized = self.canonicalize(inner_ty); - match autoderef::deref( - self.db, - krate, - InEnvironment { - goal: &canonicalized.value, - environment: self.trait_env.env.clone(), - }, - ) { - Some(derefed_ty) => { - canonicalized.decanonicalize_ty(&mut self.table, derefed_ty) - } - None => self.err_ty(), - } - } - None => self.err_ty(), - }, + UnaryOp::Deref => { + autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty()) + } UnaryOp::Neg => { match inner_ty.kind(Interner) { // Fast path for builtins @@ -722,20 +647,19 @@ impl<'a> InferenceContext<'a> { let base_ty = self.infer_expr_inner(*base, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none()); - if let (Some(index_trait), Some(krate)) = - (self.resolve_ops_index(), self.resolver.krate()) - { - let canonicalized = self.canonicalize(base_ty); - let self_ty = method_resolution::resolve_indexing_op( + if let Some(index_trait) = self.resolve_ops_index() { + let canonicalized = self.canonicalize(base_ty.clone()); + let receiver_adjustments = method_resolution::resolve_indexing_op( self.db, - &canonicalized.value, self.trait_env.clone(), - krate, + canonicalized.value, index_trait, ); - let self_ty = self_ty.map_or(self.err_ty(), |t| { - canonicalized.decanonicalize_ty(&mut self.table, t) - }); + let (self_ty, adj) = receiver_adjustments + .map_or((self.err_ty(), Vec::new()), |adj| { + adj.apply(&mut self.table, base_ty) + }); + self.write_expr_adj(*base, adj); self.resolve_associated_type_with_params( self_ty, self.resolve_ops_index_output(), @@ -982,22 +906,20 @@ impl<'a> InferenceContext<'a> { let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); - let resolved = self.resolver.krate().and_then(|krate| { - method_resolution::lookup_method( - &canonicalized_receiver.value, - self.db, - self.trait_env.clone(), - krate, - &traits_in_scope, - self.resolver.module().into(), - method_name, - ) - }); + let resolved = method_resolution::lookup_method( + &canonicalized_receiver.value, + self.db, + self.trait_env.clone(), + &traits_in_scope, + 
self.resolver.module().into(), + method_name, + ); let (receiver_ty, method_ty, substs) = match resolved { - Some((ty, func)) => { - let ty = canonicalized_receiver.decanonicalize_ty(&mut self.table, ty); + Some((adjust, func)) => { + let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); let generics = generics(self.db.upcast(), func.into()); - let substs = self.substs_for_method_call(generics, generic_args, &ty); + let substs = self.substs_for_method_call(generics, generic_args); + self.write_expr_adj(receiver, adjustments); self.write_method_resolution(tgt_expr, func, substs.clone()); (ty, self.db.value_ty(func.into()), substs) } @@ -1110,20 +1032,15 @@ impl<'a> InferenceContext<'a> { &mut self, def_generics: Generics, generic_args: Option<&GenericArgs>, - receiver_ty: &Ty, ) -> Substitution { let (parent_params, self_params, type_params, impl_trait_params) = def_generics.provenance_split(); assert_eq!(self_params, 0); // method shouldn't have another Self param let total_len = parent_params + type_params + impl_trait_params; let mut substs = Vec::with_capacity(total_len); - // Parent arguments are unknown, except for the receiver type - for (_id, param) in def_generics.iter_parent() { - if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf { - substs.push(receiver_ty.clone()); - } else { - substs.push(self.table.new_type_var()); - } + // Parent arguments are unknown + for _ in def_generics.iter_parent() { + substs.push(self.table.new_type_var()); } // handle provided type arguments if let Some(generic_args) = generic_args { diff --git a/crates/hir_ty/src/infer/path.rs b/crates/hir_ty/src/infer/path.rs index b63ef2ffdc8a..0d6c8f12d24f 100644 --- a/crates/hir_ty/src/infer/path.rs +++ b/crates/hir_ty/src/infer/path.rs @@ -218,14 +218,12 @@ impl<'a> InferenceContext<'a> { } let canonical_ty = self.canonicalize(ty.clone()); - let krate = self.resolver.krate()?; let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); method_resolution::iterate_method_candidates( &canonical_ty.value, self.db, self.table.trait_env.clone(), - krate, &traits_in_scope, self.resolver.module().into(), Some(name), diff --git a/crates/hir_ty/src/infer/unify.rs b/crates/hir_ty/src/infer/unify.rs index bb7cdb677e37..21b48b9d8025 100644 --- a/crates/hir_ty/src/infer/unify.rs +++ b/crates/hir_ty/src/infer/unify.rs @@ -1,6 +1,6 @@ //! Unification and canonicalization logic. 
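// --- Editor's aside (not part of the patch): the surface behaviour behind
// `resolve_indexing_op` now returning `ReceiverAdjustments` (applied to the
// base expression above): the base of an index expression is adjusted until a
// type with a matching `Index` impl is found, so indexing through references
// just works.
fn main() {
    let v = vec![10, 20, 30];
    let r: &Vec<i32> = &v;
    // &Vec<i32> is dereferenced to Vec<i32>, whose Index impl then applies.
    assert_eq!(r[1], 20);
}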
-use std::{fmt, mem, sync::Arc}; +use std::{fmt, iter, mem, sync::Arc}; use chalk_ir::{ cast::Cast, fold::Fold, interner::HasInterner, zip::Zip, FloatTy, IntTy, NoSolution, @@ -8,12 +8,14 @@ use chalk_ir::{ }; use chalk_solve::infer::ParameterEnaVariableExt; use ena::unify::UnifyKey; +use hir_expand::name; use super::{InferOk, InferResult, InferenceContext, TypeError}; use crate::{ - db::HirDatabase, fold_tys, static_lifetime, AliasEq, AliasTy, BoundVar, Canonical, Const, - DebruijnIndex, GenericArg, Goal, Guidance, InEnvironment, InferenceVar, Interner, Lifetime, - ProjectionTy, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyKind, VariableKind, + db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar, + Canonical, Const, DebruijnIndex, GenericArg, Goal, Guidance, InEnvironment, InferenceVar, + Interner, Lifetime, ProjectionTy, ProjectionTyExt, Scalar, Solution, Substitution, + TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind, }; impl<'a> InferenceContext<'a> { @@ -24,32 +26,20 @@ impl<'a> InferenceContext<'a> { where T::Result: HasInterner, { - // try to resolve obligations before canonicalizing, since this might - // result in new knowledge about variables - self.resolve_obligations_as_possible(); self.table.canonicalize(t) } } #[derive(Debug, Clone)] -pub(super) struct Canonicalized +pub(crate) struct Canonicalized where T: HasInterner, { - pub(super) value: Canonical, + pub(crate) value: Canonical, free_vars: Vec, } impl> Canonicalized { - /// this method is wrong and shouldn't exist - pub(super) fn decanonicalize_ty(&self, table: &mut InferenceTable, ty: Canonical) -> Ty { - let mut vars = self.free_vars.clone(); - while ty.binders.len(Interner) > vars.len() { - vars.push(table.new_type_var().cast(Interner)); - } - chalk_ir::Substitute::apply(&vars, ty.value, Interner) - } - pub(super) fn apply_solution( &self, ctx: &mut InferenceTable, @@ -203,13 +193,16 @@ impl<'a> InferenceTable<'a> { .intern(Interner) } - pub(super) fn canonicalize + HasInterner>( + pub(crate) fn canonicalize + HasInterner>( &mut self, t: T, ) -> Canonicalized where T::Result: HasInterner, { + // try to resolve obligations before canonicalizing, since this might + // result in new knowledge about variables + self.resolve_obligations_as_possible(); let result = self.var_unification_table.canonicalize(Interner, t); let free_vars = result .free_vars @@ -225,7 +218,7 @@ impl<'a> InferenceTable<'a> { /// type annotation (e.g. from a let type annotation, field type or function /// call). `make_ty` handles this already, but e.g. for field types we need /// to do it as well. 
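// --- Editor's aside (not part of the patch): the idea behind
// `callable_sig_from_fn_trait`, which this patch moves onto `InferenceTable`
// below. Every callable type implements `FnOnce`, so a signature can be
// recovered by checking that obligation and normalizing the `FnOnce::Output`
// projection, even for closures and other non-fn-pointer callables.
fn apply<F: FnOnce(u32) -> u32>(f: F, x: u32) -> u32 {
    f(x)
}
fn main() {
    assert_eq!(apply(|n| n + 1, 41), 42);
}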
- pub(super) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty { + pub(crate) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty { fold_tys( ty, |ty, _| match ty.kind(Interner) { @@ -238,7 +231,7 @@ impl<'a> InferenceTable<'a> { ) } - pub(super) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { + pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { let var = self.new_type_var(); let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() }; let obligation = alias_eq.cast(Interner); @@ -299,6 +292,13 @@ impl<'a> InferenceTable<'a> { self.resolve_with_fallback_inner(&mut Vec::new(), t, &fallback) } + pub(crate) fn instantiate_canonical(&mut self, canonical: Canonical) -> T::Result + where + T: HasInterner + Fold + std::fmt::Debug, + { + self.var_unification_table.instantiate_canonical(Interner, canonical) + } + fn resolve_with_fallback_inner( &mut self, var_stack: &mut Vec, @@ -351,6 +351,7 @@ impl<'a> InferenceTable<'a> { /// If `ty` is a type variable with known type, returns that type; /// otherwise, return ty. pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty { + self.resolve_obligations_as_possible(); self.var_unification_table.normalize_ty_shallow(Interner, ty).unwrap_or_else(|| ty.clone()) } @@ -363,6 +364,16 @@ impl<'a> InferenceTable<'a> { self.var_unification_table.rollback_to(snapshot.var_table_snapshot); } + /// Checks an obligation without registering it. Useful mostly to check + /// whether a trait *might* be implemented before deciding to 'lock in' the + /// choice (during e.g. method resolution or deref). + pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option { + let in_env = InEnvironment::new(&self.trait_env.env, goal); + let canonicalized = self.canonicalize(in_env); + let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value); + solution + } + pub(crate) fn register_obligation(&mut self, goal: Goal) { let in_env = InEnvironment::new(&self.trait_env.env, goal); self.register_obligation_in_env(in_env) @@ -522,6 +533,51 @@ impl<'a> InferenceTable<'a> { } } } + + pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + match ty.callable_sig(self.db) { + Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), + None => self.callable_sig_from_fn_trait(ty, num_args), + } + } + + fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + let krate = self.trait_env.krate; + let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; + let output_assoc_type = + self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; + + let mut arg_tys = vec![]; + let arg_ty = TyBuilder::tuple(num_args) + .fill(iter::repeat_with(|| { + let arg = self.new_type_var(); + arg_tys.push(arg.clone()); + arg + })) + .build(); + + let projection = { + let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type); + if b.remaining() != 2 { + return None; + } + b.push(ty.clone()).push(arg_ty).build() + }; + + let trait_env = self.trait_env.env.clone(); + let obligation = InEnvironment { + goal: projection.trait_ref(self.db).cast(Interner), + environment: trait_env, + }; + let canonical = self.canonicalize(obligation.clone()); + if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { + self.register_obligation(obligation.goal); + let return_ty = self.normalize_projection_ty(projection); + Some((arg_tys, return_ty)) + } else { + None + } + } } impl<'a> fmt::Debug for 
InferenceTable<'a> { diff --git a/crates/hir_ty/src/method_resolution.rs b/crates/hir_ty/src/method_resolution.rs index c91b6f2e82d1..44ece57a8e0e 100644 --- a/crates/hir_ty/src/method_resolution.rs +++ b/crates/hir_ty/src/method_resolution.rs @@ -17,10 +17,11 @@ use rustc_hash::{FxHashMap, FxHashSet}; use stdx::never; use crate::{ - autoderef, + autoderef::{self, AutoderefKind}, consteval::{self, ConstExt}, db::HirDatabase, from_foreign_def_id, + infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast}, primitive::{self, FloatTy, IntTy, UintTy}, static_lifetime, utils::all_super_traits, @@ -429,28 +430,25 @@ pub fn def_crates( Some(res) } -/// Look up the method with the given name, returning the actual autoderefed -/// receiver type (but without autoref applied yet). +/// Look up the method with the given name. pub(crate) fn lookup_method( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: &Name, -) -> Option<(Canonical, FunctionId)> { +) -> Option<(ReceiverAdjustments, FunctionId)> { iterate_method_candidates( ty, db, env, - krate, traits_in_scope, visible_from_module, Some(name), LookupMode::MethodCall, - |ty, f| match f { - AssocItemId::FunctionId(f) => Some((ty.clone(), f)), + |adjustments, f| match f { + AssocItemId::FunctionId(f) => Some((adjustments, f)), _ => None, }, ) @@ -496,33 +494,89 @@ impl From> for VisibleFromModule { } } +#[derive(Debug, Clone, Default)] +pub struct ReceiverAdjustments { + autoref: Option, + autoderefs: usize, + unsize_array: bool, +} + +impl ReceiverAdjustments { + pub(crate) fn apply(&self, table: &mut InferenceTable, ty: Ty) -> (Ty, Vec) { + let mut ty = ty; + let mut adjust = Vec::new(); + for _ in 0..self.autoderefs { + match autoderef::autoderef_step(table, ty.clone()) { + None => { + never!("autoderef not possible for {:?}", ty); + ty = TyKind::Error.intern(Interner); + break; + } + Some((kind, new_ty)) => { + ty = new_ty.clone(); + adjust.push(Adjustment { + kind: Adjust::Deref(match kind { + // FIXME should we know the mutability here? + AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)), + AutoderefKind::Builtin => None, + }), + target: new_ty, + }); + } + } + } + if self.unsize_array { + ty = match ty.kind(Interner) { + TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner), + _ => { + never!("unsize_array with non-array {:?}", ty); + ty + } + }; + // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference + adjust.push(Adjustment { + kind: Adjust::Pointer(PointerCast::Unsize), + target: ty.clone(), + }); + } + if let Some(m) = self.autoref { + ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner); + adjust + .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() }); + } + (ty, adjust) + } + + fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments { + Self { autoref: Some(m), ..*self } + } +} + // This would be nicer if it just returned an iterator, but that runs into // lifetime problems, because we need to borrow temp `CrateImplDefs`. // FIXME add a context type here? 
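// --- Illustrative sketch (not part of this patch) ----------------------------
// `ReceiverAdjustments` above records how a method receiver was massaged during
// lookup: some number of autoderefs, then (optionally) array-to-slice unsizing,
// then (optionally) one autoref. The toy types below (`ToyTy`, `ToyAdjustments`)
// are hypothetical stand-ins that only demonstrate the order in which `apply`
// replays those three steps.
#[derive(Clone, Debug, PartialEq)]
enum ToyTy {
    Unit,
    Array(Box<ToyTy>, usize),
    Slice(Box<ToyTy>),
    Ref(Box<ToyTy>),
}

#[derive(Default)]
struct ToyAdjustments {
    autoderefs: usize,
    unsize_array: bool,
    autoref: bool, // shared borrow only, to keep the sketch small
}

impl ToyAdjustments {
    fn apply(&self, mut ty: ToyTy) -> ToyTy {
        // 1. replay the autoderef steps (only builtin `&T -> T` here; the real
        //    code also handles overloaded `Deref` impls)
        for _ in 0..self.autoderefs {
            ty = match ty {
                ToyTy::Ref(inner) => *inner,
                other => other, // the real code reports this case as an error
            };
        }
        // 2. array-to-slice unsizing, the only unsizing done for receivers
        if self.unsize_array {
            ty = match ty {
                ToyTy::Array(elem, _) => ToyTy::Slice(elem),
                other => other,
            };
        }
        // 3. the final autoref for `&self` methods is applied last
        if self.autoref {
            ty = ToyTy::Ref(Box::new(ty));
        }
        ty
    }
}

fn main() {
    // A receiver of type `&[(); 3]` calling a `&self` slice method:
    // one deref, unsize the array, then re-borrow.
    let adj = ToyAdjustments { autoderefs: 1, unsize_array: true, autoref: true };
    let receiver = ToyTy::Ref(Box::new(ToyTy::Array(Box::new(ToyTy::Unit), 3)));
    let adjusted = adj.apply(receiver);
    assert_eq!(adjusted, ToyTy::Ref(Box::new(ToyTy::Slice(Box::new(ToyTy::Unit)))));
}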
-pub fn iterate_method_candidates( +pub(crate) fn iterate_method_candidates( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - mut callback: impl FnMut(&Canonical, AssocItemId) -> Option, + mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option, ) -> Option { let mut slot = None; iterate_method_candidates_dyn( ty, db, env, - krate, traits_in_scope, visible_from_module, name, mode, - &mut |ty, item| { + &mut |adj, item| { assert!(slot.is_none()); - if let Some(it) = callback(ty, item) { + if let Some(it) = callback(adj, item) { slot = Some(it); return ControlFlow::Break(()); } @@ -532,28 +586,45 @@ pub fn iterate_method_candidates( slot } +pub fn iterate_path_candidates( + ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + traits_in_scope: &FxHashSet, + visible_from_module: VisibleFromModule, + name: Option<&Name>, + callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, +) -> ControlFlow<()> { + iterate_method_candidates_dyn( + ty, + db, + env, + traits_in_scope, + visible_from_module, + name, + LookupMode::Path, + // the adjustments are not relevant for path lookup + &mut |_, id| callback(id), + ) +} + pub fn iterate_method_candidates_dyn( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { match mode { LookupMode::MethodCall => { - // For method calls, rust first does any number of autoderef, and then one - // autoref (i.e. when the method takes &self or &mut self). We just ignore - // the autoref currently -- when we find a method matching the given name, - // we assume it fits. - - // Also note that when we've got a receiver like &S, even if the method we - // find in the end takes &self, we still do the autoderef step (just as - // rustc does an autoderef and then autoref again). - let ty = InEnvironment { goal: ty.clone(), environment: env.env.clone() }; + // For method calls, rust first does any number of autoderef, and + // then one autoref (i.e. when the method takes &self or &mut self). + // Note that when we've got a receiver like &S, even if the method + // we find in the end takes &self, we still do the autoderef step + // (just as rustc does an autoderef and then autoref again). // We have to be careful about the order we're looking at candidates // in here. Consider the case where we're resolving `x.clone()` @@ -568,29 +639,31 @@ pub fn iterate_method_candidates_dyn( // the methods by autoderef order of *receiver types*, not *self // types*. 
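// --- Illustrative sketch (not part of this patch) ----------------------------
// The candidate order described in the comment above is observable in plain
// Rust: an inherent method reached after fewer autoderef steps beats a method
// of the `Deref` target, and a `&self` method on a by-value receiver is reached
// via the final autoref. `Wrapper` below is a made-up type for illustration.
use std::ops::Deref;

struct Wrapper(String);

impl Deref for Wrapper {
    type Target = String;
    fn deref(&self) -> &String {
        &self.0
    }
}

impl Wrapper {
    // Deliberately shadows `String::len`/`str::len`.
    fn len(&self) -> usize {
        42
    }
}

fn main() {
    let w = Wrapper("hello".to_owned());
    // `Wrapper` comes first in the autoderef chain (Wrapper -> String -> str),
    // so the inherent method wins; the receiver is autoref'd to `&Wrapper`.
    assert_eq!(w.len(), 42);
    // After an explicit deref, the standard `String::len` is used instead.
    assert_eq!((*w).len(), 5);
}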
- let deref_chain = autoderef_method_receiver(db, krate, ty); - let mut deref_chains = stdx::slice_tails(&deref_chain); + let mut table = InferenceTable::new(db, env.clone()); + let ty = table.instantiate_canonical(ty.clone()); + let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty); + let deref_chains = stdx::slice_tails(&deref_chain); - deref_chains.try_for_each(|deref_chain| { + let result = deref_chains.zip(adj).try_for_each(|(deref_chain, adj)| { iterate_method_candidates_with_autoref( deref_chain, + adj, db, env.clone(), - krate, traits_in_scope, visible_from_module, name, callback, ) - }) + }); + result } LookupMode::Path => { // No autoderef for path lookups iterate_method_candidates_for_self_ty( ty, db, - env, - krate, + env.clone(), traits_in_scope, visible_from_module, name, @@ -602,27 +675,27 @@ pub fn iterate_method_candidates_dyn( fn iterate_method_candidates_with_autoref( deref_chain: &[Canonical], + first_adjustment: ReceiverAdjustments, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { let (receiver_ty, rest) = match deref_chain.split_first() { - Some((rec, rest)) => (rec.clone(), rest), + Some((rec, rest)) => (rec, rest), None => { never!("received empty deref-chain"); return ControlFlow::Break(()); } }; iterate_method_candidates_by_receiver( - &receiver_ty, + receiver_ty, + first_adjustment.clone(), &rest, db, env.clone(), - krate, traits_in_scope, visible_from_module, name, @@ -630,17 +703,17 @@ fn iterate_method_candidates_with_autoref( )?; let refed = Canonical { - binders: receiver_ty.binders.clone(), value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone()) .intern(Interner), + binders: receiver_ty.binders.clone(), }; iterate_method_candidates_by_receiver( &refed, + first_adjustment.with_autoref(Mutability::Not), deref_chain, db, env.clone(), - krate, traits_in_scope, visible_from_module, name, @@ -648,16 +721,17 @@ fn iterate_method_candidates_with_autoref( )?; let ref_muted = Canonical { - binders: receiver_ty.binders, - value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value).intern(Interner), + value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone()) + .intern(Interner), + binders: receiver_ty.binders.clone(), }; iterate_method_candidates_by_receiver( &ref_muted, + first_adjustment.with_autoref(Mutability::Mut), deref_chain, db, - env, - krate, + env.clone(), traits_in_scope, visible_from_module, name, @@ -667,14 +741,14 @@ fn iterate_method_candidates_with_autoref( fn iterate_method_candidates_by_receiver( receiver_ty: &Canonical, + receiver_adjustments: ReceiverAdjustments, rest_of_deref_chain: &[Canonical], db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { // We're looking for methods with *receiver* type receiver_ty. 
These could // be found in any of the derefs of receiver_ty, so we have to go through @@ -686,7 +760,7 @@ fn iterate_method_candidates_by_receiver( env.clone(), name, Some(receiver_ty), - krate, + Some(receiver_adjustments.clone()), visible_from_module, &mut callback, )? @@ -697,10 +771,10 @@ fn iterate_method_candidates_by_receiver( self_ty, db, env.clone(), - krate, traits_in_scope, name, Some(receiver_ty), + Some(receiver_adjustments.clone()), &mut callback, )? } @@ -712,11 +786,10 @@ fn iterate_method_candidates_for_self_ty( self_ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { iterate_inherent_methods( self_ty, @@ -724,24 +797,24 @@ fn iterate_method_candidates_for_self_ty( env.clone(), name, None, - krate, + None, visible_from_module, &mut callback, )?; - iterate_trait_method_candidates(self_ty, db, env, krate, traits_in_scope, name, None, callback) + iterate_trait_method_candidates(self_ty, db, env, traits_in_scope, name, None, None, callback) } fn iterate_trait_method_candidates( self_ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, name: Option<&Name>, receiver_ty: Option<&Canonical>, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + receiver_adjustments: Option, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { - let receiver_is_array = matches!(self_ty.value.kind(Interner), chalk_ir::TyKind::Array(..)); + let self_is_array = matches!(self_ty.value.kind(Interner), chalk_ir::TyKind::Array(..)); // if ty is `dyn Trait`, the trait doesn't need to be in scope let inherent_trait = self_ty.value.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t)); @@ -763,10 +836,10 @@ fn iterate_trait_method_candidates( // 2021. // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for // arrays. - if data.skip_array_during_method_dispatch && receiver_is_array { + if data.skip_array_during_method_dispatch && self_is_array { // FIXME: this should really be using the edition of the method name's span, in case it // comes from a macro - if db.crate_graph()[krate].edition < Edition::Edition2021 { + if db.crate_graph()[env.krate].edition < Edition::Edition2021 { continue; } } @@ -782,14 +855,13 @@ fn iterate_trait_method_candidates( continue; } if !known_implemented { - let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone()); - if db.trait_solve(krate, goal.cast(Interner)).is_none() { + let goal = generic_implements_goal(db, env.clone(), t, self_ty); + if db.trait_solve(env.krate, goal.cast(Interner)).is_none() { continue 'traits; } } known_implemented = true; - // FIXME: we shouldn't be ignoring the binders here - callback(self_ty, item)? 
+ callback(receiver_adjustments.clone().unwrap_or_default(), item)?; } } ControlFlow::Continue(()) @@ -824,11 +896,11 @@ fn iterate_inherent_methods( env: Arc, name: Option<&Name>, receiver_ty: Option<&Canonical>, - krate: CrateId, + receiver_adjustments: Option, visible_from_module: VisibleFromModule, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { - let def_crates = match def_crates(db, &self_ty.value, krate) { + let def_crates = match def_crates(db, &self_ty.value, env.krate) { Some(k) => k, None => return ControlFlow::Continue(()), }; @@ -848,6 +920,7 @@ fn iterate_inherent_methods( env.clone(), name, receiver_ty, + receiver_adjustments.clone(), module, callback, )?; @@ -856,7 +929,17 @@ fn iterate_inherent_methods( for krate in def_crates { let impls = db.inherent_impls_in_crate(krate); - impls_for_self_ty(&impls, self_ty, db, env.clone(), name, receiver_ty, module, callback)?; + impls_for_self_ty( + &impls, + self_ty, + db, + env.clone(), + name, + receiver_ty, + receiver_adjustments.clone(), + module, + callback, + )?; } return ControlFlow::Continue(()); @@ -867,8 +950,9 @@ fn iterate_inherent_methods( env: Arc, name: Option<&Name>, receiver_ty: Option<&Canonical>, + receiver_adjustments: Option, visible_from_module: Option, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { let impls_for_self_ty = filter_inherent_impls_for_self_ty(impls, &self_ty.value); for &impl_def in impls_for_self_ty { @@ -889,33 +973,32 @@ fn iterate_inherent_methods( // already happens in `is_valid_candidate` above; if not, we // check it here if receiver_ty.is_none() - && inherent_impl_substs(db, env.clone(), impl_def, self_ty).is_none() + && inherent_impl_substs(db, env.clone(), impl_def, &self_ty).is_none() { cov_mark::hit!(impl_self_type_match_without_receiver); continue; } - let receiver_ty = receiver_ty.unwrap_or(self_ty); - callback(receiver_ty, item)?; + callback(receiver_adjustments.clone().unwrap_or_default(), item)?; } } ControlFlow::Continue(()) } } -/// Returns the self type for the index trait call. +/// Returns the receiver type for the index trait call. 
pub fn resolve_indexing_op( db: &dyn HirDatabase, - ty: &Canonical, env: Arc, - krate: CrateId, + ty: Canonical, index_trait: TraitId, -) -> Option> { - let ty = InEnvironment { goal: ty.clone(), environment: env.env.clone() }; - let deref_chain = autoderef_method_receiver(db, krate, ty); - for ty in deref_chain { - let goal = generic_implements_goal(db, env.clone(), index_trait, ty.clone()); - if db.trait_solve(krate, goal.cast(Interner)).is_some() { - return Some(ty); +) -> Option { + let mut table = InferenceTable::new(db, env.clone()); + let ty = table.instantiate_canonical(ty); + let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty); + for (ty, adj) in deref_chain.into_iter().zip(adj) { + let goal = generic_implements_goal(db, env.clone(), index_trait, &ty); + if db.trait_solve(env.krate, goal.cast(Interner)).is_some() { + return Some(adj); } } None @@ -1067,11 +1150,10 @@ pub fn implements_trait( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, trait_: TraitId, ) -> bool { - let goal = generic_implements_goal(db, env, trait_, ty.clone()); - let solution = db.trait_solve(krate, goal.cast(Interner)); + let goal = generic_implements_goal(db, env.clone(), trait_, &ty); + let solution = db.trait_solve(env.krate, goal.cast(Interner)); solution.is_some() } @@ -1080,11 +1162,10 @@ pub fn implements_trait_unique( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, trait_: TraitId, ) -> bool { - let goal = generic_implements_goal(db, env, trait_, ty.clone()); - let solution = db.trait_solve(krate, goal.cast(Interner)); + let goal = generic_implements_goal(db, env.clone(), trait_, &ty); + let solution = db.trait_solve(env.krate, goal.cast(Interner)); matches!(solution, Some(crate::Solution::Unique(_))) } @@ -1095,11 +1176,11 @@ fn generic_implements_goal( db: &dyn HirDatabase, env: Arc, trait_: TraitId, - self_ty: Canonical, + self_ty: &Canonical, ) -> Canonical> { let mut kinds = self_ty.binders.interned().to_vec(); let trait_ref = TyBuilder::trait_ref(db, trait_) - .push(self_ty.value) + .push(self_ty.value.clone()) .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) .build(); kinds.extend( @@ -1117,17 +1198,27 @@ fn generic_implements_goal( } fn autoderef_method_receiver( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment>, -) -> Vec> { - let mut deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect(); + table: &mut InferenceTable, + ty: Ty, +) -> (Vec>, Vec) { + let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new()); + let mut autoderef = autoderef::Autoderef::new(table, ty); + while let Some((ty, derefs)) = autoderef.next() { + deref_chain.push(autoderef.table.canonicalize(ty).value); + adjustments.push(ReceiverAdjustments { + autoref: None, + autoderefs: derefs, + unsize_array: false, + }); + } // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!) 
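// --- Illustrative sketch (not part of this patch) ----------------------------
// The `unsize_array` step modelled here is visible in plain Rust: `[i32; 3]`
// has no inherent `len`, yet the call below resolves to `<[i32]>::len` because
// the receiver is unsized from `[i32; 3]` to `[i32]` and then auto-referenced,
// i.e. the `Pointer(Unsize)` + `Borrow(Ref(Not))` adjustments checked by the
// `receiver_adjustment_unsize_array` test later in this patch.
fn main() {
    let a = [1, 2, 3];
    assert_eq!(a.len(), 3); // slice method reached through array unsizing
    assert_eq!(a.first(), Some(&1)); // likewise for other slice methods
}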
- if let Some(TyKind::Array(parameters, _)) = deref_chain.last().map(|ty| ty.value.kind(Interner)) - { - let kinds = deref_chain.last().unwrap().binders.clone(); + if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = ( + deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())), + adjustments.last().cloned(), + ) { let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner); - deref_chain.push(Canonical { value: unsized_ty, binders: kinds }) + deref_chain.push(Canonical { value: unsized_ty, binders }); + adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj }); } - deref_chain + (deref_chain, adjustments) } diff --git a/crates/hir_ty/src/tests.rs b/crates/hir_ty/src/tests.rs index 29250dca00cc..7385da56622d 100644 --- a/crates/hir_ty/src/tests.rs +++ b/crates/hir_ty/src/tests.rs @@ -100,6 +100,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour .trim_start_matches("adjustments: ") .split(',') .map(|it| it.trim().to_string()) + .filter(|it| !it.is_empty()) .collect(), ); } else { diff --git a/crates/hir_ty/src/tests/coercion.rs b/crates/hir_ty/src/tests/coercion.rs index dd3b86f05033..c0dddb608ea3 100644 --- a/crates/hir_ty/src/tests/coercion.rs +++ b/crates/hir_ty/src/tests/coercion.rs @@ -242,6 +242,45 @@ fn test() { ); } +#[test] +fn coerce_autoderef_implication_1() { + check_no_mismatches( + r" +//- minicore: deref +struct Foo; +impl core::ops::Deref for Foo { type Target = (); } + +fn takes_ref_foo(x: &Foo) {} +fn test() { + let foo = Foo; + //^^^ type: Foo<{unknown}> + takes_ref_foo(&foo); + + let foo = Foo; + //^^^ type: Foo + let _: &() = &foo; +}", + ); +} + +#[test] +fn coerce_autoderef_implication_2() { + check( + r" +//- minicore: deref +struct Foo; +impl core::ops::Deref for Foo { type Target = (); } + +fn takes_ref_foo(x: &Foo) {} +fn test() { + let foo = Foo; + //^^^ type: Foo<{unknown}> + let _: &u32 = &Foo; + //^^^^ expected &u32, got &Foo<{unknown}> +}", + ); +} + #[test] fn closure_return_coerce() { check_no_mismatches( diff --git a/crates/hir_ty/src/tests/method_resolution.rs b/crates/hir_ty/src/tests/method_resolution.rs index 9c0c00da3b1d..c118ae24cfe3 100644 --- a/crates/hir_ty/src/tests/method_resolution.rs +++ b/crates/hir_ty/src/tests/method_resolution.rs @@ -1460,3 +1460,121 @@ fn main() { "#, ); } + +#[test] +fn deref_fun_1() { + check_types( + r#" +//- minicore: deref + +struct A(T, U); +struct B(T); +struct C(T); + +impl core::ops::Deref for A, u32> { + type Target = B; + fn deref(&self) -> &B { &self.0 } +} +impl core::ops::Deref for B { + type Target = C; + fn deref(&self) -> &C { loop {} } +} + +impl C { + fn thing(&self) -> T { self.0 } +} + +fn make() -> T { loop {} } + +fn test() { + let a1 = A(make(), make()); + let _: usize = (*a1).0; + a1; + //^^ A, u32> + + let a2 = A(make(), make()); + a2.thing(); + //^^^^^^^^^^ isize + a2; + //^^ A, u32> +} +"#, + ); +} + +#[test] +fn deref_fun_2() { + check_types( + r#" +//- minicore: deref + +struct A(T, U); +struct B(T); +struct C(T); + +impl core::ops::Deref for A, u32> { + type Target = B; + fn deref(&self) -> &B { &self.0 } +} +impl core::ops::Deref for B { + type Target = C; + fn deref(&self) -> &C { loop {} } +} + +impl core::ops::Deref for A, i32> { + type Target = C; + fn deref(&self) -> &C { &self.0 } +} + +impl C { + fn thing(&self) -> T { self.0 } +} + +fn make() -> T { loop {} } + +fn test() { + let a1 = A(make(), 1u32); + a1.thing(); + a1; + //^^ A, u32> + + let a2 = A(make(), 1i32); + let _: &str = a2.thing(); + a2; 
+ //^^ A, i32> +} +"#, + ); +} + +#[test] +fn receiver_adjustment_autoref() { + check( + r#" +struct Foo; +impl Foo { + fn foo(&self) {} +} +fn test() { + Foo.foo(); + //^^^ adjustments: Borrow(Ref(Not)) + (&Foo).foo(); + // ^^^^ adjustments: , +} +"#, + ); +} + +#[test] +fn receiver_adjustment_unsize_array() { + // FIXME not quite correct + check( + r#" +//- minicore: slice +fn test() { + let a = [1, 2, 3]; + a.len(); +} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not)) +"#, + ); +} diff --git a/crates/hir_ty/src/tests/traits.rs b/crates/hir_ty/src/tests/traits.rs index c2669646e224..04d8b91e3598 100644 --- a/crates/hir_ty/src/tests/traits.rs +++ b/crates/hir_ty/src/tests/traits.rs @@ -540,6 +540,52 @@ fn test() { ); } +#[test] +fn infer_ops_index_field() { + check_types( + r#" +//- minicore: index +struct Bar; +struct Foo { + field: u32; +} + +impl core::ops::Index for Bar { + type Output = Foo; +} + +fn test() { + let a = Bar; + let b = a[1u32].field; + b; +} //^ u32 +"#, + ); +} + +#[test] +fn infer_ops_index_field_autoderef() { + check_types( + r#" +//- minicore: index +struct Bar; +struct Foo { + field: u32; +} + +impl core::ops::Index for Bar { + type Output = Foo; +} + +fn test() { + let a = Bar; + let b = (&a[1u32]).field; + b; +} //^ u32 +"#, + ); +} + #[test] fn infer_ops_index_int() { check_types( diff --git a/crates/hir_ty/src/traits.rs b/crates/hir_ty/src/traits.rs index bd280ba774d2..b139edbee945 100644 --- a/crates/hir_ty/src/traits.rs +++ b/crates/hir_ty/src/traits.rs @@ -40,8 +40,7 @@ fn create_chalk_solver() -> chalk_recursive::RecursiveSolver { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TraitEnvironment { pub krate: CrateId, - // When we're using Chalk's Ty we can make this a BTreeMap since it's Ord, - // but for now it's too annoying... 
+ // FIXME make this a BTreeMap pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>, pub env: chalk_ir::Environment, } diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index b6d9e4021d9b..f886ff7837b4 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -2,7 +2,9 @@ use hir::Semantics; use ide_db::{ base_db::{FileId, FilePosition}, defs::{Definition, IdentClass}, - helpers::{for_each_break_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token}, + helpers::{ + for_each_break_and_continue_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token, + }, search::{FileReference, ReferenceCategory, SearchScope}, RootDatabase, }; @@ -10,7 +12,7 @@ use rustc_hash::FxHashSet; use syntax::{ ast::{self, HasLoopBody}, match_ast, AstNode, - SyntaxKind::{IDENT, INT_NUMBER}, + SyntaxKind::{self, IDENT, INT_NUMBER}, SyntaxNode, SyntaxToken, TextRange, T, }; @@ -66,7 +68,9 @@ pub(crate) fn highlight_related( T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => { highlight_break_points(token) } - T![break] | T![loop] | T![while] if config.break_points => highlight_break_points(token), + T![break] | T![loop] | T![while] | T![continue] if config.break_points => { + highlight_break_points(token) + } _ if config.references => highlight_references(sema, &syntax, token, file_id), _ => None, } @@ -187,6 +191,7 @@ fn highlight_exit_points( fn highlight_break_points(token: SyntaxToken) -> Option> { fn hl( + cursor_token_kind: SyntaxKind, token: Option, label: Option, body: Option, @@ -197,11 +202,23 @@ fn highlight_break_points(token: SyntaxToken) -> Option> { label.as_ref().map(|it| it.syntax().text_range()), ); highlights.extend(range.map(|range| HighlightedRange { category: None, range })); - for_each_break_expr(label, body, &mut |break_| { - let range = cover_range( - break_.break_token().map(|it| it.text_range()), - break_.lifetime().map(|it| it.syntax().text_range()), - ); + for_each_break_and_continue_expr(label, body, &mut |expr| { + let range: Option = match (cursor_token_kind, expr) { + (T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => { + cover_range( + break_.break_token().map(|it| it.text_range()), + break_.lifetime().map(|it| it.syntax().text_range()), + ) + } + ( + T![for] | T![while] | T![loop] | T![continue], + ast::Expr::ContinueExpr(continue_), + ) => cover_range( + continue_.continue_token().map(|it| it.text_range()), + continue_.lifetime().map(|it| it.syntax().text_range()), + ), + _ => None, + }; highlights.extend(range.map(|range| HighlightedRange { category: None, range })); }); Some(highlights) @@ -210,6 +227,7 @@ fn highlight_break_points(token: SyntaxToken) -> Option> { let lbl = match_ast! 
{ match parent { ast::BreakExpr(b) => b.lifetime(), + ast::ContinueExpr(c) => c.lifetime(), ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()), ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()), ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()), @@ -224,19 +242,29 @@ fn highlight_break_points(token: SyntaxToken) -> Option> { } None => true, }; + let token_kind = token.kind(); for anc in token.ancestors().flat_map(ast::Expr::cast) { return match anc { - ast::Expr::LoopExpr(l) if label_matches(l.label()) => { - hl(l.loop_token(), l.label(), l.loop_body().and_then(|it| it.stmt_list())) - } - ast::Expr::ForExpr(f) if label_matches(f.label()) => { - hl(f.for_token(), f.label(), f.loop_body().and_then(|it| it.stmt_list())) - } - ast::Expr::WhileExpr(w) if label_matches(w.label()) => { - hl(w.while_token(), w.label(), w.loop_body().and_then(|it| it.stmt_list())) - } + ast::Expr::LoopExpr(l) if label_matches(l.label()) => hl( + token_kind, + l.loop_token(), + l.label(), + l.loop_body().and_then(|it| it.stmt_list()), + ), + ast::Expr::ForExpr(f) if label_matches(f.label()) => hl( + token_kind, + f.for_token(), + f.label(), + f.loop_body().and_then(|it| it.stmt_list()), + ), + ast::Expr::WhileExpr(w) if label_matches(w.label()) => hl( + token_kind, + w.while_token(), + w.label(), + w.loop_body().and_then(|it| it.stmt_list()), + ), ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => { - hl(None, e.label(), e.stmt_list()) + hl(token_kind, None, e.label(), e.stmt_list()) } _ => continue, }; @@ -804,6 +832,115 @@ fn foo() { ); } + #[test] + fn test_hl_break_for_but_not_continue() { + check( + r#" +fn foo() { + 'outer: for _ in () { + // ^^^^^^^^^^^ + break; + // ^^^^^ + continue; + 'inner: for _ in () { + break; + continue; + 'innermost: for _ in () { + continue 'outer; + break 'outer; + // ^^^^^^^^^^^^ + continue 'inner; + break 'inner; + } + break$0 'outer; + // ^^^^^^^^^^^^ + continue 'outer; + break; + continue; + } + break; + // ^^^^^ + continue; + } +} +"#, + ); + } + + #[test] + fn test_hl_continue_for_but_not_break() { + check( + r#" +fn foo() { + 'outer: for _ in () { + // ^^^^^^^^^^^ + break; + continue; + // ^^^^^^^^ + 'inner: for _ in () { + break; + continue; + 'innermost: for _ in () { + continue 'outer; + // ^^^^^^^^^^^^^^^ + break 'outer; + continue 'inner; + break 'inner; + } + break 'outer; + continue$0 'outer; + // ^^^^^^^^^^^^^^^ + break; + continue; + } + break; + continue; + // ^^^^^^^^ + } +} +"#, + ); + } + + #[test] + fn test_hl_break_and_continue() { + check( + r#" +fn foo() { + 'outer: fo$0r _ in () { + // ^^^^^^^^^^^ + break; + // ^^^^^ + continue; + // ^^^^^^^^ + 'inner: for _ in () { + break; + continue; + 'innermost: for _ in () { + continue 'outer; + // ^^^^^^^^^^^^^^^ + break 'outer; + // ^^^^^^^^^^^^ + continue 'inner; + break 'inner; + } + break 'outer; + // ^^^^^^^^^^^^ + continue 'outer; + // ^^^^^^^^^^^^^^^ + break; + continue; + } + break; + // ^^^^^ + continue; + // ^^^^^^^^ + } +} +"#, + ); + } + #[test] fn test_hl_break_while() { check( diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index a232ebd4fb7c..df0ca941c99d 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -4583,3 +4583,33 @@ pub struct Foo; "##]], ); } + +#[test] +fn hover_dollar_crate() { + // $crate should be resolved to the right crate name. + + check( + r#" +//- /main.rs crate:main deps:dep +dep::m!(KONST$0); +//- /dep.rs crate:dep +#[macro_export] +macro_rules! 
m { + ( $name:ident ) => { const $name: $crate::Type = $crate::Type; }; +} + +pub struct Type; +"#, + expect![[r#" + *KONST* + + ```rust + main + ``` + + ```rust + const KONST: dep::Type = $crate::Type + ``` + "#]], + ); +} diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index 38edda9c1c08..c3ac84e58c6f 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -1536,11 +1536,13 @@ trait Trait { ) } + // FIXME: import is classified as function #[test] fn attr() { check( r#" //- proc_macros: identity +use proc_macros::identity; #[proc_macros::$0identity] fn func() {} @@ -1548,7 +1550,7 @@ fn func() {} expect![[r#" identity Attribute FileId(1) 1..107 32..40 - FileId(0) 16..24 + FileId(0) 43..51 "#]], ); check( @@ -1564,12 +1566,31 @@ fn func$0() {} ); } + // FIXME: import is classified as function + #[test] + fn proc_macro() { + check( + r#" +//- proc_macros: mirror +use proc_macros::mirror; + +mirror$0! {} +"#, + expect![[r#" + mirror Macro FileId(1) 1..77 22..28 + + FileId(0) 26..32 + "#]], + ) + } + #[test] fn derive() { check( r#" //- proc_macros: derive_identity //- minicore: derive +use proc_macros::DeriveIdentity; #[derive(proc_macros::DeriveIdentity$0)] struct Foo; @@ -1577,8 +1598,20 @@ struct Foo; expect![[r#" derive_identity Derive FileId(2) 1..107 45..60 - FileId(0) 23..37 + FileId(0) 17..31 + FileId(0) 56..70 "#]], - ) + ); + check( + r#" +#[proc_macro_derive(Derive, attributes(x))] +pub fn deri$0ve(_stream: TokenStream) -> TokenStream {} +"#, + expect![[r#" + derive Derive FileId(0) 0..97 51..57 + + (no references) + "#]], + ); } } diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 7d92c5051b14..1b2fc9c635d3 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -183,7 +183,8 @@ pub(crate) fn highlight( traverse( &mut hl, &sema, - InFile::new(file_id.into(), &root), + file_id, + &root, sema.scope(&root).krate(), range_to_highlight, syntactic_name_ref_highlighting, @@ -194,11 +195,13 @@ pub(crate) fn highlight( fn traverse( hl: &mut Highlights, sema: &Semantics, - root: InFile<&SyntaxNode>, + file_id: FileId, + root: &SyntaxNode, krate: Option, range_to_highlight: TextRange, syntactic_name_ref_highlighting: bool, ) { + let is_unlinked = sema.to_module_def(file_id).is_none(); let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); let mut current_macro_call: Option = None; @@ -209,7 +212,7 @@ fn traverse( // Walk all nodes, keeping track of whether we are inside a macro or not. // If in macro, expand it first and highlight the expanded code. 
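// --- Illustrative sketch (not part of this patch) ----------------------------
// A toy version of the walk described in the comment above: a preorder
// traversal with Enter/Leave events plus a counter tracking whether we are
// currently inside a macro call (the real code additionally expands the macro
// and highlights the expanded tokens). `ToyNode` and `ToyEvent` are made up.
struct ToyNode {
    is_macro_call: bool,
    children: Vec<ToyNode>,
}

enum ToyEvent<'a> {
    Enter(&'a ToyNode),
    Leave(&'a ToyNode),
}

fn preorder<'a>(node: &'a ToyNode, out: &mut Vec<ToyEvent<'a>>) {
    out.push(ToyEvent::Enter(node));
    for child in &node.children {
        preorder(child, out);
    }
    out.push(ToyEvent::Leave(node));
}

fn main() {
    let tree = ToyNode {
        is_macro_call: false,
        children: vec![ToyNode { is_macro_call: true, children: Vec::new() }],
    };
    let mut events = Vec::new();
    preorder(&tree, &mut events);

    let mut macro_depth = 0usize;
    for event in &events {
        match event {
            ToyEvent::Enter(node) => {
                if node.is_macro_call {
                    macro_depth += 1;
                }
                // Here the real traversal would switch to the macro expansion.
                let _inside_macro = macro_depth > 0;
            }
            ToyEvent::Leave(node) => {
                if node.is_macro_call {
                    macro_depth -= 1;
                }
            }
        }
    }
    assert_eq!(macro_depth, 0);
}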
- for event in root.value.preorder_with_tokens() { + for event in root.preorder_with_tokens() { let range = match &event { WalkEvent::Enter(it) | WalkEvent::Leave(it) => it.text_range(), }; @@ -283,7 +286,7 @@ fn traverse( WalkEvent::Enter(it) => it, WalkEvent::Leave(NodeOrToken::Token(_)) => continue, WalkEvent::Leave(NodeOrToken::Node(node)) => { - inject::doc_comment(hl, sema, root.with_value(&node)); + inject::doc_comment(hl, sema, InFile::new(file_id.into(), &node)); continue; } }; @@ -378,6 +381,11 @@ fn traverse( NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)), }; if let Some((mut highlight, binding_hash)) = element { + if is_unlinked && highlight.tag == HlTag::UnresolvedReference { + // do not emit unresolved references if the file is unlinked + // let the editor do its highlighting for these tokens instead + continue; + } if inside_attribute { highlight |= HlMod::Attribute } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html index 645e6c589dde..b035e786d352 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html @@ -58,5 +58,4 @@ impl t for foo { pub fn is_static() {} pub fn is_not_static(&self) {} -} - \ No newline at end of file +} \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html new file mode 100644 index 000000000000..9fe2b50cde79 --- /dev/null +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html @@ -0,0 +1,57 @@ + + +
+#[allow(dead_code)]
+#[rustfmt::skip]
+#[proc_macros::identity]
+#[derive(Copy)]
+/// This is a doc comment
+// This is a normal comment
+/// This is a doc comment
+#[derive(Copy)]
+// This is another normal comment
+/// This is another doc comment
+// This is another normal comment
+#[derive(Copy)]
+// The reason for these being here is to test AttrIds
+struct Foo;
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html index 2f96a1be85ca..3e20b2f3512d 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html @@ -46,5 +46,4 @@ fn main() { let foo = Some(92); let nums = iter::repeat(foo.unwrap()); -} - \ No newline at end of file +} \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html similarity index 75% rename from crates/ide/src/syntax_highlighting/test_data/highlighting.html rename to crates/ide/src/syntax_highlighting/test_data/highlight_general.html index 793f554c6379..22bdfffa3ef4 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlighting.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html @@ -44,9 +44,6 @@
use inner::{self as inner_mod};
 mod inner {}
 
-#[allow()]
-#[rustfmt::skip]
-#[proc_macros::identity]
 pub mod ops {
     #[lang = "fn_once"]
     pub trait FnOnce<Args> {}
@@ -58,11 +55,8 @@
     pub trait Fn<Args>: FnMut<Args> {}
 }
 
-proc_macros::mirror! {
-    {
-        ,i32 :x pub
-        ,i32 :y pub
-    } Foo struct
+struct Foo {
+    x: u32,
 }
 
 trait Bar where Self: {
@@ -71,7 +65,7 @@
 
 impl Bar for Foo where Self: {
     fn bar(&self) -> i32 {
-        self.x
+        self.x
     }
 }
 
@@ -81,26 +75,17 @@
     }
 
     fn qux(&mut self) {
-        self.x = 0;
+        self.x = 0;
     }
 
     fn quop(&self) -> i32 {
-        self.x
+        self.x
     }
 }
 
 use self::FooCopy::{self as BarCopy};
 
 #[derive(Copy)]
-/// This is a doc comment
-// This is a normal comment
-/// This is a doc comment
-#[derive(Copy)]
-// This is another normal comment
-/// This is another doc comment
-// This is another normal comment
-#[derive(Copy)]
-// The reason for these being here is to test AttrIds
 struct FooCopy {
     x: u32,
 }
@@ -146,68 +131,19 @@
     let bar = foobar();
 }
 
-macro_rules! def_fn {
-    ($($tt:tt)*) => {$($tt)*}
-}
-
-def_fn! {
-    fn bar() -> u32 {
-        100
-    }
-}
-
-macro_rules! dont_color_me_braces {
-    () => {0}
-}
-
-macro_rules! noop {
-    ($expr:expr) => {
-        $expr
-    }
-}
-
-macro_rules! keyword_frag {
-    ($type:ty) => ($type)
-}
-
-macro with_args($i:ident) {
-    $i
-}
-
-macro without_args {
-    ($i:ident) => {
-        $i
-    }
-}
-
 // comment
 fn main() {
-    println!("Hello, {}!", 92);
-    dont_color_me_braces!();
-
-    let mut vec = Vec::new();
-    if true {
-        let x = 92;
-        vec.push(Foo { x, y: 1 });
-    }
-
-    for e in vec {
-        // Do nothing
-    }
-
-    noop!(noop!(1));
-
     let mut x = 42;
     x += 1;
     let y = &mut x;
     let z = &y;
 
-    let Foo { x: z, y } = Foo { x: z, y };
+    let Foo { x: z, y } = Foo { x: z, y };
 
     y;
 
-    let mut foo = Foo { x, y: x };
-    let foo2 = Foo { x, y: x };
+    let mut foo = Foo { x, y: x };
+    let foo2 = Foo { x, y: x };
     foo.quop();
     foo.qux();
     foo.baz(foo2);
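// --- Illustrative note (not part of this patch) -------------------------------
// The `highlight_injection.html` diff that follows exercises rust-analyzer's
// injection highlighting: a string literal passed to a parameter named
// `ra_fixture` is highlighted as Rust source. A minimal illustration of that
// convention (the helper body is just a stand-in):
fn fixture(ra_fixture: &str) {
    let _ = ra_fixture; // a real helper would parse the fixture text
}

fn main() {
    // The contents of this raw string are what receive the injected highlighting.
    fixture(
        r#"
fn injected() {
    let answer = 92;
}
"#,
    );
}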
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
index 1713306dae2a..023e791f8bb8 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -45,10 +45,17 @@
 
 fn main() {
     fixture(r#"
-        trait Foo {
-            fn foo() {
-                println!("2 + 2 = {}", 4);
-            }
-        }"#
+trait Foo {
+    fn foo() {
+        println!("2 + 2 = {}", 4);
+    }
+}"#
+    );
+    fixture(r"
+fn foo() {
+    foo($0{
+        92
+    }$0)
+}"
     );
 }
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html new file mode 100644 index 000000000000..c3f71d443f67 --- /dev/null +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -0,0 +1,88 @@ + + +
+proc_macros::mirror! {
+    {
+        ,i32 :x pub
+        ,i32 :y pub
+    } Foo struct
+}
+macro_rules! def_fn {
+    ($($tt:tt)*) => {$($tt)*}
+}
+
+def_fn! {
+    fn bar() -> u32 {
+        100
+    }
+}
+
+macro_rules! dont_color_me_braces {
+    () => {0}
+}
+
+macro_rules! noop {
+    ($expr:expr) => {
+        $expr
+    }
+}
+
+macro_rules! keyword_frag {
+    ($type:ty) => ($type)
+}
+
+macro with_args($i:ident) {
+    $i
+}
+
+macro without_args {
+    ($i:ident) => {
+        $i
+    }
+}
+
+fn main() {
+    println!("Hello, {}!", 92);
+    dont_color_me_braces!();
+    noop!(noop!(1));
+}
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/rainbow_highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html similarity index 100% rename from crates/ide/src/syntax_highlighting/test_data/rainbow_highlighting.html rename to crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html diff --git a/crates/ide/src/syntax_highlighting/test_data/injection.html b/crates/ide/src/syntax_highlighting/test_data/injection.html deleted file mode 100644 index 2e9ad144f19a..000000000000 --- a/crates/ide/src/syntax_highlighting/test_data/injection.html +++ /dev/null @@ -1,53 +0,0 @@ - - -
-fn f(ra_fixture: &str) {}
-fn main() {
-    f(r"
-fn foo() {
-    foo($0{
-        92
-    }$0)
-}");
-}
-    
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 4beab9909c41..ec50fde3562c 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -6,19 +6,98 @@ use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear}; use crate::{fixture, FileRange, HlTag, TextRange}; +#[test] +fn attributes() { + check_highlighting( + r#" +//- proc_macros: identity +//- minicore: derive, copy +#[allow(dead_code)] +#[rustfmt::skip] +#[proc_macros::identity] +#[derive(Copy)] +/// This is a doc comment +// This is a normal comment +/// This is a doc comment +#[derive(Copy)] +// This is another normal comment +/// This is another doc comment +// This is another normal comment +#[derive(Copy)] +// The reason for these being here is to test AttrIds +struct Foo; +"#, + expect_file!["./test_data/highlight_attributes.html"], + false, + ); +} +#[test] +fn macros() { + check_highlighting( + r#" +//- proc_macros: mirror +proc_macros::mirror! { + { + ,i32 :x pub + ,i32 :y pub + } Foo struct +} +macro_rules! def_fn { + ($($tt:tt)*) => {$($tt)*} +} + +def_fn! { + fn bar() -> u32 { + 100 + } +} + +macro_rules! dont_color_me_braces { + () => {0} +} + +macro_rules! noop { + ($expr:expr) => { + $expr + } +} + +macro_rules! keyword_frag { + ($type:ty) => ($type) +} + +macro with_args($i:ident) { + $i +} + +macro without_args { + ($i:ident) => { + $i + } +} + +fn main() { + println!("Hello, {}!", 92); + dont_color_me_braces!(); + noop!(noop!(1)); +} +"#, + expect_file!["./test_data/highlight_macros.html"], + false, + ); +} + +/// If what you want to test feels like a specific entity consider making a new test instead, +/// this test fixture here in fact should shrink instead of grow ideally. #[test] fn test_highlighting() { check_highlighting( r#" -//- proc_macros: identity, mirror //- minicore: derive, copy //- /main.rs crate:main deps:foo use inner::{self as inner_mod}; mod inner {} -#[allow()] -#[rustfmt::skip] -#[proc_macros::identity] pub mod ops { #[lang = "fn_once"] pub trait FnOnce {} @@ -30,11 +109,8 @@ pub mod ops { pub trait Fn: FnMut {} } -proc_macros::mirror! { - { - ,i32 :x pub - ,i32 :y pub - } Foo struct +struct Foo { + x: u32, } trait Bar where Self: { @@ -64,15 +140,6 @@ impl Foo { use self::FooCopy::{self as BarCopy}; #[derive(Copy)] -/// This is a doc comment -// This is a normal comment -/// This is a doc comment -#[derive(Copy)] -// This is another normal comment -/// This is another doc comment -// This is another normal comment -#[derive(Copy)] -// The reason for these being here is to test AttrIds struct FooCopy { x: u32, } @@ -118,57 +185,8 @@ fn foo() { let bar = foobar(); } -macro_rules! def_fn { - ($($tt:tt)*) => {$($tt)*} -} - -def_fn! { - fn bar() -> u32 { - 100 - } -} - -macro_rules! dont_color_me_braces { - () => {0} -} - -macro_rules! noop { - ($expr:expr) => { - $expr - } -} - -macro_rules! keyword_frag { - ($type:ty) => ($type) -} - -macro with_args($i:ident) { - $i -} - -macro without_args { - ($i:ident) => { - $i - } -} - // comment fn main() { - println!("Hello, {}!", 92); - dont_color_me_braces!(); - - let mut vec = Vec::new(); - if true { - let x = 92; - vec.push(Foo { x, y: 1 }); - } - - for e in vec { - // Do nothing - } - - noop!(noop!(1)); - let mut x = 42; x += 1; let y = &mut x; @@ -288,161 +306,12 @@ macro_rules! 
die { panic!(); }; } -"# - .trim(), - expect_file!["./test_data/highlighting.html"], - false, - ); -} - -#[test] -fn test_rainbow_highlighting() { - check_highlighting( - r#" -fn main() { - let hello = "hello"; - let x = hello.to_string(); - let y = hello.to_string(); - - let x = "other color please!"; - let y = x.to_string(); -} - -fn bar() { - let mut hello = "hello"; -} -"# - .trim(), - expect_file!["./test_data/rainbow_highlighting.html"], - true, - ); -} - -#[test] -fn benchmark_syntax_highlighting_long_struct() { - if skip_slow_tests() { - return; - } - - let fixture = bench_fixture::big_struct(); - let (analysis, file_id) = fixture::file(&fixture); - - let hash = { - let _pt = bench("syntax highlighting long struct"); - analysis - .highlight(file_id) - .unwrap() - .iter() - .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) - .count() - }; - assert_eq!(hash, 2001); -} - -#[test] -fn syntax_highlighting_not_quadratic() { - if skip_slow_tests() { - return; - } - - let mut al = AssertLinear::default(); - while al.next_round() { - for i in 6..=10 { - let n = 1 << i; - - let fixture = bench_fixture::big_struct_n(n); - let (analysis, file_id) = fixture::file(&fixture); - - let time = Instant::now(); - - let hash = analysis - .highlight(file_id) - .unwrap() - .iter() - .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) - .count(); - assert!(hash > n as usize); - - let elapsed = time.elapsed(); - al.sample(n as f64, elapsed.as_millis() as f64); - } - } -} - -#[test] -fn benchmark_syntax_highlighting_parser() { - if skip_slow_tests() { - return; - } - - let fixture = bench_fixture::glorious_old_parser(); - let (analysis, file_id) = fixture::file(&fixture); - - let hash = { - let _pt = bench("syntax highlighting parser"); - analysis - .highlight(file_id) - .unwrap() - .iter() - .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) - .count() - }; - assert_eq!(hash, 1616); -} - -#[test] -fn test_ranges() { - let (analysis, file_id) = fixture::file( - r#" -#[derive(Clone, Debug)] -struct Foo { - pub x: i32, - pub y: i32, -} "#, - ); - - // The "x" - let highlights = &analysis - .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }) - .unwrap(); - - assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public"); -} - -#[test] -fn test_flattening() { - check_highlighting( - r##" -fn fixture(ra_fixture: &str) {} - -fn main() { - fixture(r#" - trait Foo { - fn foo() { - println!("2 + 2 = {}", 4); - } - }"# - ); -}"## - .trim(), - expect_file!["./test_data/highlight_injection.html"], + expect_file!["./test_data/highlight_general.html"], false, ); } -#[test] -fn ranges_sorted() { - let (analysis, file_id) = fixture::file( - r#" -#[foo(bar = "bar")] -macro_rules! test {} -}"# - .trim(), - ); - let _ = analysis.highlight(file_id).unwrap(); -} - #[test] fn test_string_highlighting() { // The format string detection is based on macro-expansion, @@ -555,8 +424,7 @@ fn main() { toho!("{}fmt", 0); asm!("mov eax, {0}"); format_args!(concat!("{}"), "{}"); -}"# - .trim(), +}"#, expect_file!["./test_data/highlight_strings.html"], false, ); @@ -630,8 +498,7 @@ fn main() { packed.a.calls_autoref(); } } -"# - .trim(), +"#, expect_file!["./test_data/highlight_unsafe.html"], false, ); @@ -781,8 +648,7 @@ pub fn block_comments2() {} //! ``` //! fn test() {} //! 
``` -"# - .trim(), +"#, expect_file!["./test_data/highlight_doctest.html"], false, ); @@ -792,14 +658,14 @@ pub fn block_comments2() {} fn test_extern_crate() { check_highlighting( r#" - //- /main.rs crate:main deps:std,alloc - extern crate std; - extern crate alloc as abc; - //- /std/lib.rs crate:std - pub struct S; - //- /alloc/lib.rs crate:alloc - pub struct A - "#, +//- /main.rs crate:main deps:std,alloc +extern crate std; +extern crate alloc as abc; +//- /std/lib.rs crate:std +pub struct S; +//- /alloc/lib.rs crate:alloc +pub struct A +"#, expect_file!["./test_data/highlight_extern_crate.html"], false, ); @@ -809,41 +675,41 @@ fn test_extern_crate() { fn test_crate_root() { check_highlighting( r#" - //- minicore: iterators - //- /main.rs crate:main deps:foo - extern crate foo; - use core::iter; +//- minicore: iterators +//- /main.rs crate:main deps:foo +extern crate foo; +use core::iter; - pub const NINETY_TWO: u8 = 92; +pub const NINETY_TWO: u8 = 92; - use foo as foooo; +use foo as foooo; - pub(crate) fn main() { - let baz = iter::repeat(92); - } +pub(crate) fn main() { + let baz = iter::repeat(92); +} - mod bar { - pub(in super) const FORTY_TWO: u8 = 42; +mod bar { + pub(in super) const FORTY_TWO: u8 = 42; - mod baz { - use super::super::NINETY_TWO; - use crate::foooo::Point; + mod baz { + use super::super::NINETY_TWO; + use crate::foooo::Point; - pub(in super::super) const TWENTY_NINE: u8 = 29; - } - } - //- /foo.rs crate:foo - struct Point { - x: u8, - y: u8, - } + pub(in super::super) const TWENTY_NINE: u8 = 29; + } +} +//- /foo.rs crate:foo +struct Point { + x: u8, + y: u8, +} - mod inner { - pub(super) fn swap(p: crate::Point) -> crate::Point { - crate::Point { x: p.y, y: p.x } - } - } - "#, +mod inner { + pub(super) fn swap(p: crate::Point) -> crate::Point { + crate::Point { x: p.y, y: p.x } + } +} +"#, expect_file!["./test_data/highlight_crate_root.html"], false, ); @@ -853,14 +719,14 @@ fn test_crate_root() { fn test_default_library() { check_highlighting( r#" - //- minicore: option, iterators - use core::iter; +//- minicore: option, iterators +use core::iter; - fn main() { - let foo = Some(92); - let nums = iter::repeat(foo.unwrap()); - } - "#, +fn main() { + let foo = Some(92); + let nums = iter::repeat(foo.unwrap()); +} +"#, expect_file!["./test_data/highlight_default_library.html"], false, ); @@ -888,7 +754,7 @@ impl t for foo { pub fn is_static() {} pub fn is_not_static(&self) {} } - "#, +"#, expect_file!["./test_data/highlight_assoc_functions.html"], false, ) @@ -898,26 +764,161 @@ impl t for foo { fn test_injection() { check_highlighting( r##" -fn f(ra_fixture: &str) {} +fn fixture(ra_fixture: &str) {} + fn main() { - f(r" + fixture(r#" +trait Foo { + fn foo() { + println!("2 + 2 = {}", 4); + } +}"# + ); + fixture(r" fn foo() { foo(\$0{ 92 }\$0) -}"); +}" + ); } - "##, - expect_file!["./test_data/injection.html"], +"##, + expect_file!["./test_data/highlight_injection.html"], false, ); } +#[test] +fn test_rainbow_highlighting() { + check_highlighting( + r#" +fn main() { + let hello = "hello"; + let x = hello.to_string(); + let y = hello.to_string(); + + let x = "other color please!"; + let y = x.to_string(); +} + +fn bar() { + let mut hello = "hello"; +} +"#, + expect_file!["./test_data/highlight_rainbow.html"], + true, + ); +} + +#[test] +fn test_ranges() { + let (analysis, file_id) = fixture::file( + r#" +#[derive(Clone, Debug)] +struct Foo { + pub x: i32, + pub y: i32, +} +"#, + ); + + // The "x" + let highlights = &analysis + .highlight_range(FileRange { file_id, 
range: TextRange::at(45.into(), 1.into()) }) + .unwrap(); + + assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public"); +} + +#[test] +fn ranges_sorted() { + let (analysis, file_id) = fixture::file( + r#" +#[foo(bar = "bar")] +macro_rules! test {} +}"# + .trim(), + ); + let _ = analysis.highlight(file_id).unwrap(); +} + /// Highlights the code given by the `ra_fixture` argument, renders the /// result as HTML, and compares it with the HTML file given as `snapshot`. /// Note that the `snapshot` file is overwritten by the rendered HTML. fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) { - let (analysis, file_id) = fixture::file(ra_fixture); + let (analysis, file_id) = fixture::file(ra_fixture.trim()); let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); expect.assert_eq(actual_html) } + +#[test] +fn benchmark_syntax_highlighting_long_struct() { + if skip_slow_tests() { + return; + } + + let fixture = bench_fixture::big_struct(); + let (analysis, file_id) = fixture::file(&fixture); + + let hash = { + let _pt = bench("syntax highlighting long struct"); + analysis + .highlight(file_id) + .unwrap() + .iter() + .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) + .count() + }; + assert_eq!(hash, 2001); +} + +#[test] +fn syntax_highlighting_not_quadratic() { + if skip_slow_tests() { + return; + } + + let mut al = AssertLinear::default(); + while al.next_round() { + for i in 6..=10 { + let n = 1 << i; + + let fixture = bench_fixture::big_struct_n(n); + let (analysis, file_id) = fixture::file(&fixture); + + let time = Instant::now(); + + let hash = analysis + .highlight(file_id) + .unwrap() + .iter() + .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) + .count(); + assert!(hash > n as usize); + + let elapsed = time.elapsed(); + al.sample(n as f64, elapsed.as_millis() as f64); + } + } +} + +#[test] +fn benchmark_syntax_highlighting_parser() { + if skip_slow_tests() { + return; + } + + let fixture = bench_fixture::glorious_old_parser(); + let (analysis, file_id) = fixture::file(&fixture); + + let hash = { + let _pt = bench("syntax highlighting parser"); + analysis + .highlight(file_id) + .unwrap() + .iter() + .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) + .count() + }; + assert_eq!(hash, 1616); +} diff --git a/crates/ide_assists/src/handlers/add_missing_match_arms.rs b/crates/ide_assists/src/handlers/add_missing_match_arms.rs index eeed0386ad6a..ec8fbb497451 100644 --- a/crates/ide_assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide_assists/src/handlers/add_missing_match_arms.rs @@ -1,11 +1,11 @@ use std::iter::{self, Peekable}; use either::Either; -use hir::{Adt, HasSource, ModuleDef, Semantics}; +use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics}; use ide_db::helpers::{mod_path_to_ast, FamousDefs}; use ide_db::RootDatabase; use itertools::Itertools; -use syntax::ast::{self, make, AstNode, HasName, MatchArm, MatchArmList, MatchExpr, Pat}; +use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat}; use crate::{ utils::{self, render_snippet, Cursor}, @@ -52,36 +52,45 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> let expr = match_expr.expr()?; - let mut arms: Vec = match_arm_list.arms().collect(); - if let [arm] = arms.as_slice() { - if let Some(Pat::WildcardPat(..)) = arm.pat() { - arms.clear(); - } - } + let mut has_catch_all_arm = false; - let top_lvl_pats: Vec<_> = arms - .iter() - 
.filter_map(ast::MatchArm::pat) - .flat_map(|pat| match pat { - // Special case OrPat as separate top-level pats - Pat::OrPat(or_pat) => Either::Left(or_pat.pats()), - _ => Either::Right(iter::once(pat)), + let top_lvl_pats: Vec<_> = match_arm_list + .arms() + .filter_map(|arm| Some((arm.pat()?, arm.guard().is_some()))) + .flat_map(|(pat, has_guard)| { + match pat { + // Special case OrPat as separate top-level pats + Pat::OrPat(or_pat) => Either::Left(or_pat.pats()), + _ => Either::Right(iter::once(pat)), + } + .map(move |pat| (pat, has_guard)) + }) + .map(|(pat, has_guard)| { + has_catch_all_arm |= !has_guard && matches!(pat, Pat::WildcardPat(_)); + pat }) // Exclude top level wildcards so that they are expanded by this assist, retains status quo in #8129. .filter(|pat| !matches!(pat, Pat::WildcardPat(_))) .collect(); let module = ctx.sema.scope(expr.syntax()).module()?; + let (mut missing_pats, is_non_exhaustive): ( + Peekable>>, + bool, + ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) { + let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db()); - let mut missing_pats: Peekable>> = if let Some(enum_def) = - resolve_enum_def(&ctx.sema, &expr) - { let variants = enum_def.variants(ctx.db()); let missing_pats = variants .into_iter() - .filter_map(|variant| build_pat(ctx.db(), module, variant)) - .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat)); + .filter_map(|variant| { + Some(( + build_pat(ctx.db(), module, variant)?, + variant.should_be_hidden(ctx.db(), module.krate()), + )) + }) + .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); let option_enum = FamousDefs(&ctx.sema, Some(module.krate())).core_option_Option().map(lift_enum); @@ -92,8 +101,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> } else { Box::new(missing_pats) }; - missing_pats.peekable() + (missing_pats.peekable(), is_non_exhaustive) } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) { + let is_non_exhaustive = + enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db())); + let mut n_arms = 1; let variants_of_enums: Vec> = enum_defs .into_iter() @@ -117,17 +129,23 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> .multi_cartesian_product() .inspect(|_| cov_mark::hit!(add_missing_match_arms_lazy_computation)) .map(|variants| { + let is_hidden = variants + .iter() + .any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); let patterns = variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant)); - ast::Pat::from(make::tuple_pat(patterns)) + + (ast::Pat::from(make::tuple_pat(patterns)), is_hidden) }) - .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat)); - (Box::new(missing_pats) as Box>).peekable() + .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); + ((Box::new(missing_pats) as Box>).peekable(), is_non_exhaustive) } else { return None; }; - if missing_pats.peek().is_none() { + let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm; + + if !needs_catch_all_arm && missing_pats.peek().is_none() { return None; } @@ -138,8 +156,10 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> |builder| { let new_match_arm_list = match_arm_list.clone_for_update(); let missing_arms = missing_pats - .map(|pat| make::match_arm(iter::once(pat), None, make::ext::expr_todo())) - .map(|it| it.clone_for_update()); + .map(|(pat, hidden)| { + 
(make::match_arm(iter::once(pat), None, make::ext::expr_todo()), hidden) + }) + .map(|(it, hidden)| (it.clone_for_update(), hidden)); let catch_all_arm = new_match_arm_list .arms() @@ -159,7 +179,22 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> } } let mut first_new_arm = None; - for arm in missing_arms { + for (arm, hidden) in missing_arms { + if hidden { + needs_catch_all_arm = !has_catch_all_arm; + } else { + first_new_arm.get_or_insert_with(|| arm.clone()); + new_match_arm_list.add_arm(arm); + } + } + if needs_catch_all_arm && !has_catch_all_arm { + cov_mark::hit!(added_wildcard_pattern); + let arm = make::match_arm( + iter::once(make::wildcard_pat().into()), + None, + make::ext::expr_todo(), + ) + .clone_for_update(); first_new_arm.get_or_insert_with(|| arm.clone()); new_match_arm_list.add_arm(arm); } @@ -250,11 +285,29 @@ enum ExtendedVariant { Variant(hir::Variant), } +impl ExtendedVariant { + fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool { + match self { + ExtendedVariant::Variant(var) => { + var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate + } + _ => false, + } + } +} + fn lift_enum(e: hir::Enum) -> ExtendedEnum { ExtendedEnum::Enum(e) } impl ExtendedEnum { + fn is_non_exhaustive(self, db: &RootDatabase) -> bool { + match self { + ExtendedEnum::Enum(e) => e.attrs(db).by_key("non_exhaustive").exists(), + _ => false, + } + } + fn variants(self, db: &RootDatabase) -> Vec { match self { ExtendedEnum::Enum(e) => { @@ -1280,6 +1333,352 @@ fn foo(t: bool) { $0true => todo!(), false => todo!(), } +}"#, + ); + } + + #[test] + fn does_not_fill_hidden_variants() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +pub enum E { A, #[doc(hidden)] B, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn does_not_fill_hidden_variants_tuple() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: (bool, ::e::E)) { + match $0t { + } +} +//- /e.rs crate:e +pub enum E { A, #[doc(hidden)] B, } +"#, + r#" +fn foo(t: (bool, ::e::E)) { + match t { + $0(true, e::E::A) => todo!(), + (false, e::E::A) => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_with_only_hidden_variants() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn does_not_fill_wildcard_when_hidden_variants_are_explicit() { + check_assist_not_applicable( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + e::E::A => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, } +"#, + ); + } + + // FIXME: I don't think the assist should be applicable in this case + #[test] + fn does_not_fill_wildcard_with_wildcard() { + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + _ => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn 
fills_wildcard_on_non_exhaustive_with_explicit_matches() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + e::E::A => todo!(), + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + e::E::A => todo!(), + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_on_non_exhaustive_without_matches() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_on_non_exhaustive_with_doc_hidden() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, #[doc(hidden)] B }"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_on_non_exhaustive_with_doc_hidden_with_explicit_arms() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + e::E::A => todo!(), + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, #[doc(hidden)] B }"#, + r#" +fn foo(t: ::e::E) { + match t { + e::E::A => todo!(), + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn fill_wildcard_with_partial_wildcard() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E, b: bool) { + match $0t { + _ if b => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, }"#, + r#" +fn foo(t: ::e::E, b: bool) { + match t { + _ if b => todo!(), + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn does_not_fill_wildcard_with_partial_wildcard_and_wildcard() { + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E, b: bool) { + match $0t { + _ if b => todo!(), + _ => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, }"#, + r#" +fn foo(t: ::e::E, b: bool) { + match t { + _ if b => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn non_exhaustive_doc_hidden_tuple_fills_wildcard() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, #[doc(hidden)] B, }"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn ignores_doc_hidden_for_crate_local_enums() { + check_assist( + add_missing_match_arms, + r#" +enum E { A, #[doc(hidden)] B, } + +fn foo(t: E) { + match $0t { + } +}"#, + r#" +enum E { A, #[doc(hidden)] B, } + +fn foo(t: E) { + match t { + $0E::A => todo!(), + E::B => todo!(), + } +}"#, + ); + } + + #[test] + fn ignores_doc_hidden_for_crate_local_enums_but_not_non_exhaustive() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +#[non_exhaustive] +enum E { A, #[doc(hidden)] B, } + +fn foo(t: E) { + match $0t { + } +}"#, + r#" +#[non_exhaustive] +enum E { A, 
#[doc(hidden)] B, } + +fn foo(t: E) { + match t { + $0E::A => todo!(), + E::B => todo!(), + _ => todo!(), + } }"#, ); } diff --git a/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs b/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs index 0d2daa8dc30c..4a4ad984db14 100644 --- a/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs +++ b/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs @@ -154,11 +154,11 @@ fn is_ref_and_impls_iter_method( let has_wanted_method = ty .iterate_method_candidates( sema.db, - krate, + &scope, &traits_in_scope, None, Some(&wanted_method), - |_, func| { + |func| { if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) { return Some(()); } diff --git a/crates/ide_assists/src/handlers/destructure_tuple_binding.rs b/crates/ide_assists/src/handlers/destructure_tuple_binding.rs index 5f361f01dc1d..ac17e3f057a5 100644 --- a/crates/ide_assists/src/handlers/destructure_tuple_binding.rs +++ b/crates/ide_assists/src/handlers/destructure_tuple_binding.rs @@ -371,7 +371,7 @@ fn handle_ref_field_usage(ctx: &AssistContext, field_expr: &FieldExpr) -> RefDat fn is_auto_ref(ctx: &AssistContext, call_expr: &MethodCallExpr) -> bool { fn impl_(ctx: &AssistContext, call_expr: &MethodCallExpr) -> Option { let rec = call_expr.receiver()?; - let rec_ty = ctx.sema.type_of_expr(&rec)?.adjusted(); + let rec_ty = ctx.sema.type_of_expr(&rec)?.original(); // input must be actual value if rec_ty.is_reference() { return Some(false); diff --git a/crates/ide_assists/src/handlers/extract_module.rs b/crates/ide_assists/src/handlers/extract_module.rs index 6cc311fd7585..64875adfae23 100644 --- a/crates/ide_assists/src/handlers/extract_module.rs +++ b/crates/ide_assists/src/handlers/extract_module.rs @@ -779,7 +779,8 @@ fn get_replacements_for_visibilty_change( ast::Item::Enum(it) => replacements.push((it.visibility(), it.syntax().clone())), ast::Item::ExternCrate(it) => replacements.push((it.visibility(), it.syntax().clone())), ast::Item::Fn(it) => replacements.push((it.visibility(), it.syntax().clone())), - ast::Item::Impl(it) => impls.push(it), + //Associated item's visibility should not be changed + ast::Item::Impl(it) if it.for_token().is_none() => impls.push(it), ast::Item::MacroRules(it) => replacements.push((it.visibility(), it.syntax().clone())), ast::Item::MacroDef(it) => replacements.push((it.visibility(), it.syntax().clone())), ast::Item::Module(it) => replacements.push((it.visibility(), it.syntax().clone())), @@ -825,11 +826,7 @@ fn add_change_vis( vis: Option, node_or_token_opt: Option, ) -> Option<()> { - if let Some(vis) = vis { - if vis.syntax().text() == "pub" { - ted::replace(vis.syntax(), make::visibility_pub_crate().syntax().clone_for_update()); - } - } else { + if let None = vis { if let Some(node_or_token) = node_or_token_opt { let pub_crate_vis = make::visibility_pub_crate().clone_for_update(); if let Some(node) = node_or_token.as_node() { @@ -962,8 +959,8 @@ mod modname { pub(crate) inner: SomeType, } - pub(crate) struct PrivateStruct1 { - pub(crate) inner: i32, + pub struct PrivateStruct1 { + pub inner: i32, } impl PrivateStruct { @@ -1033,7 +1030,7 @@ mod modname { pub(crate) struct A {} impl A { - pub(crate) fn new_a() -> i32 { + pub fn new_a() -> i32 { 2 } } @@ -1148,7 +1145,7 @@ mod modname { pub struct PrivateStruct; $0struct Strukt { - field: PrivateStruct, + field: PrivateStruct, }$0 struct Strukt1 { @@ -1164,7 +1161,7 @@ mod modname { use super::PrivateStruct; pub(crate) struct Strukt { - pub(crate) 
field: PrivateStruct, + pub(crate) field: PrivateStruct, } } @@ -1203,7 +1200,7 @@ mod modname { use super::A; impl A { - pub(crate) fn new_a() -> i32 { + pub fn new_a() -> i32 { 2 } } @@ -1251,7 +1248,7 @@ mod modname { use super::super::foo::A; impl A { - pub(crate) fn new_a() -> i32 { + pub fn new_a() -> i32 { 2 } } @@ -1378,4 +1375,39 @@ mod modname { ", ) } + + #[test] + fn test_do_not_apply_visibility_modifier_to_trait_impl_items() { + check_assist( + extract_module, + r" + trait ATrait { + fn function(); + } + + struct A {} + +$0impl ATrait for A { + fn function() {} +}$0 + ", + r" + trait ATrait { + fn function(); + } + + struct A {} + +mod modname { + use super::A; + + use super::ATrait; + + impl ATrait for A { + fn function() {} + } +} + ", + ) + } } diff --git a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs index 82e0970cc4bf..1cdd4187af4e 100644 --- a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs @@ -11,15 +11,14 @@ use ide_db::{ search::FileReference, RootDatabase, }; -use itertools::Itertools; +use itertools::{Itertools, Position}; use rustc_hash::FxHashSet; use syntax::{ ast::{ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams, - HasName, HasTypeBounds, HasVisibility, + HasName, HasVisibility, }, - match_ast, - ted::{self, Position}, + match_ast, ted, SyntaxElement, SyntaxKind::*, SyntaxNode, T, }; @@ -106,7 +105,12 @@ pub(crate) fn extract_struct_from_enum_variant( } let indent = enum_ast.indent_level(); - let def = create_struct_def(variant_name.clone(), &variant, &field_list, &enum_ast); + let generic_params = enum_ast + .generic_param_list() + .and_then(|known_generics| extract_generic_params(&known_generics, &field_list)); + let generics = generic_params.as_ref().map(|generics| generics.clone_for_update()); + let def = + create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast); def.reindent_to(indent); let start_offset = &variant.parent_enum().syntax().clone(); @@ -118,7 +122,7 @@ pub(crate) fn extract_struct_from_enum_variant( ], ); - update_variant(&variant, enum_ast.generic_param_list()); + update_variant(&variant, generic_params.map(|g| g.clone_for_update())); }, ) } @@ -159,10 +163,77 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Va .any(|(name, _)| name.to_string() == variant_name.to_string()) } +fn extract_generic_params( + known_generics: &ast::GenericParamList, + field_list: &Either, +) -> Option { + let mut generics = known_generics.generic_params().map(|param| (param, false)).collect_vec(); + + let tagged_one = match field_list { + Either::Left(field_list) => field_list + .fields() + .filter_map(|f| f.ty()) + .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged), + Either::Right(field_list) => field_list + .fields() + .filter_map(|f| f.ty()) + .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged), + }; + + let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param)); + tagged_one.then(|| make::generic_param_list(generics)) +} + +fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, bool)]) -> bool { + let mut tagged_one = false; + + for token in ty.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token) { + for (param, tag) in generics.iter_mut().filter(|(_, 
tag)| !tag) { + match param { + ast::GenericParam::LifetimeParam(lt) + if matches!(token.kind(), T![lifetime_ident]) => + { + if let Some(lt) = lt.lifetime() { + if lt.text().as_str() == token.text() { + *tag = true; + tagged_one = true; + break; + } + } + } + param if matches!(token.kind(), T![ident]) => { + if match param { + ast::GenericParam::ConstParam(konst) => konst + .name() + .map(|name| name.text().as_str() == token.text()) + .unwrap_or_default(), + ast::GenericParam::TypeParam(ty) => ty + .name() + .map(|name| name.text().as_str() == token.text()) + .unwrap_or_default(), + ast::GenericParam::LifetimeParam(lt) => lt + .lifetime() + .map(|lt| lt.text().as_str() == token.text()) + .unwrap_or_default(), + } { + *tag = true; + tagged_one = true; + break; + } + } + _ => (), + } + } + } + + tagged_one +} + fn create_struct_def( variant_name: ast::Name, variant: &ast::Variant, field_list: &Either, + generics: Option, enum_: &ast::Enum, ) -> ast::Struct { let enum_vis = enum_.visibility(); @@ -204,9 +275,7 @@ fn create_struct_def( field_list.reindent_to(IndentLevel::single()); - // FIXME: This uses all the generic params of the enum, but the variant might not use all of them. - let strukt = make::struct_(enum_vis, variant_name, enum_.generic_param_list(), field_list) - .clone_for_update(); + let strukt = make::struct_(enum_vis, variant_name, generics, field_list).clone_for_update(); // FIXME: Consider making this an actual function somewhere (like in `AttrsOwnerEdit`) after some deliberation let attrs_and_docs = |node: &SyntaxNode| { @@ -233,36 +302,53 @@ fn create_struct_def( _ => tok, }) .collect(); - ted::insert_all(Position::first_child_of(strukt.syntax()), variant_attrs); + ted::insert_all(ted::Position::first_child_of(strukt.syntax()), variant_attrs); // copy attributes from enum ted::insert_all( - Position::first_child_of(strukt.syntax()), + ted::Position::first_child_of(strukt.syntax()), enum_.attrs().map(|it| it.syntax().clone_for_update().into()).collect(), ); strukt } -fn update_variant(variant: &ast::Variant, generic: Option) -> Option<()> { +fn update_variant(variant: &ast::Variant, generics: Option) -> Option<()> { let name = variant.name()?; - let ty = match generic { - // FIXME: This uses all the generic params of the enum, but the variant might not use all of them. 
- Some(gpl) => { - let gpl = gpl.clone_for_update(); - gpl.generic_params().for_each(|gp| { - let tbl = match gp { - ast::GenericParam::LifetimeParam(it) => it.type_bound_list(), - ast::GenericParam::TypeParam(it) => it.type_bound_list(), - ast::GenericParam::ConstParam(_) => return, - }; - if let Some(tbl) = tbl { - tbl.remove(); + let ty = generics + .filter(|generics| generics.generic_params().count() > 0) + .map(|generics| { + let mut generic_str = String::with_capacity(8); + + for (p, more) in generics.generic_params().with_position().map(|p| match p { + Position::First(p) | Position::Middle(p) => (p, true), + Position::Last(p) | Position::Only(p) => (p, false), + }) { + match p { + ast::GenericParam::ConstParam(konst) => { + if let Some(name) = konst.name() { + generic_str.push_str(name.text().as_str()); + } + } + ast::GenericParam::LifetimeParam(lt) => { + if let Some(lt) = lt.lifetime() { + generic_str.push_str(lt.text().as_str()); + } + } + ast::GenericParam::TypeParam(ty) => { + if let Some(name) = ty.name() { + generic_str.push_str(name.text().as_str()); + } + } } - }); - make::ty(&format!("{}<{}>", name.text(), gpl.generic_params().join(", "))) - } - None => make::ty(&name.text()), - }; + if more { + generic_str.push_str(", "); + } + } + + make::ty(&format!("{}<{}>", &name.text(), &generic_str)) + }) + .unwrap_or_else(|| make::ty(&name.text())); + let tuple_field = make::tuple_field(None, ty); let replacement = make::variant( name, @@ -902,4 +988,92 @@ enum A { $0One(u8, u32) } fn test_extract_not_applicable_no_field_named() { check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0None {} }"); } + + #[test] + fn test_extract_struct_only_copies_needed_generics() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum X<'a, 'b, 'x> { + $0A { a: &'a &'x mut () }, + B { b: &'b () }, + C { c: () }, +} +"#, + r#" +struct A<'a, 'x>{ a: &'a &'x mut () } + +enum X<'a, 'b, 'x> { + A(A<'a, 'x>), + B { b: &'b () }, + C { c: () }, +} +"#, + ); + } + + #[test] + fn test_extract_struct_with_liftime_type_const() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum X<'b, T, V, const C: usize> { + $0A { a: T, b: X<'b>, c: [u8; C] }, + D { d: V }, +} +"#, + r#" +struct A<'b, T, const C: usize>{ a: T, b: X<'b>, c: [u8; C] } + +enum X<'b, T, V, const C: usize> { + A(A<'b, T, C>), + D { d: V }, +} +"#, + ); + } + + #[test] + fn test_extract_struct_without_generics() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum X<'a, 'b> { + A { a: &'a () }, + B { b: &'b () }, + $0C { c: () }, +} +"#, + r#" +struct C{ c: () } + +enum X<'a, 'b> { + A { a: &'a () }, + B { b: &'b () }, + C(C), +} +"#, + ); + } + + #[test] + fn test_extract_struct_keeps_trait_bounds() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum En { + $0A { a: T }, + B { b: V }, +} +"#, + r#" +struct A{ a: T } + +enum En { + A(A), + B { b: V }, +} +"#, + ); + } } diff --git a/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs b/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs index cb3fbed21995..db0cfbe2ec0f 100644 --- a/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs +++ b/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs @@ -90,10 +90,9 @@ fn get_impl_method( let impl_def: hir::Impl = ctx.sema.to_def(impl_)?; let scope = ctx.sema.scope(impl_.syntax()); - let krate = impl_def.module(db).krate(); let ty = impl_def.self_ty(db); let traits_in_scope = scope.visible_traits(); - ty.iterate_method_candidates(db, krate, 
&traits_in_scope, None, Some(fn_name), |_, func| { + ty.iterate_method_candidates(db, &scope, &traits_in_scope, None, Some(fn_name), |func| { Some(func) }) } diff --git a/crates/ide_completion/src/completions/dot.rs b/crates/ide_completion/src/completions/dot.rs index 3bb3f883cdb9..981e25b55c7b 100644 --- a/crates/ide_completion/src/completions/dot.rs +++ b/crates/ide_completion/src/completions/dot.rs @@ -74,30 +74,28 @@ fn complete_methods( receiver: &hir::Type, mut f: impl FnMut(hir::Function), ) { - if let Some(krate) = ctx.krate { - let mut seen_methods = FxHashSet::default(); - let mut traits_in_scope = ctx.scope.visible_traits(); - - // Remove drop from the environment as calling `Drop::drop` is not allowed - if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { - cov_mark::hit!(dot_remove_drop_trait); - traits_in_scope.remove(&drop_trait.into()); - } - - receiver.iterate_method_candidates( - ctx.db, - krate, - &traits_in_scope, - ctx.module, - None, - |_ty, func| { - if func.self_param(ctx.db).is_some() && seen_methods.insert(func.name(ctx.db)) { - f(func); - } - None::<()> - }, - ); - } + let mut seen_methods = FxHashSet::default(); + let mut traits_in_scope = ctx.scope.visible_traits(); + + // Remove drop from the environment as calling `Drop::drop` is not allowed + if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { + cov_mark::hit!(dot_remove_drop_trait); + traits_in_scope.remove(&drop_trait.into()); + } + + receiver.iterate_method_candidates( + ctx.db, + &ctx.scope, + &traits_in_scope, + ctx.module, + None, + |func| { + if func.self_param(ctx.db).is_some() && seen_methods.insert(func.name(ctx.db)) { + f(func); + } + None::<()> + }, + ); } #[cfg(test)] diff --git a/crates/ide_completion/src/completions/pattern.rs b/crates/ide_completion/src/completions/pattern.rs index f1b4fa720591..c8a9cf21da12 100644 --- a/crates/ide_completion/src/completions/pattern.rs +++ b/crates/ide_completion/src/completions/pattern.rs @@ -134,39 +134,37 @@ fn pattern_path_completion( .for_each(|variant| acc.add_enum_variant(ctx, variant, None)); } res @ (hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_)) => { - if let Some(krate) = ctx.krate { - let ty = match res { - hir::PathResolution::TypeParam(param) => param.ty(ctx.db), - hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), - _ => return, - }; + let ty = match res { + hir::PathResolution::TypeParam(param) => param.ty(ctx.db), + hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), + _ => return, + }; - if let Some(hir::Adt::Enum(e)) = ty.as_adt() { - e.variants(ctx.db) - .into_iter() - .for_each(|variant| acc.add_enum_variant(ctx, variant, None)); - } + if let Some(hir::Adt::Enum(e)) = ty.as_adt() { + e.variants(ctx.db) + .into_iter() + .for_each(|variant| acc.add_enum_variant(ctx, variant, None)); + } - let traits_in_scope = ctx.scope.visible_traits(); - let mut seen = FxHashSet::default(); - ty.iterate_path_candidates( - ctx.db, - krate, - &traits_in_scope, - ctx.module, - None, - |_ty, item| { - // Note associated consts cannot be referenced in patterns - if let AssocItem::TypeAlias(ta) = item { - // We might iterate candidates of a trait multiple times here, so deduplicate them. 
- if seen.insert(item) { - acc.add_type_alias(ctx, ta); - } + let traits_in_scope = ctx.scope.visible_traits(); + let mut seen = FxHashSet::default(); + ty.iterate_path_candidates( + ctx.db, + &ctx.scope, + &traits_in_scope, + ctx.module, + None, + |item| { + // Note associated consts cannot be referenced in patterns + if let AssocItem::TypeAlias(ta) = item { + // We might iterate candidates of a trait multiple times here, so deduplicate them. + if seen.insert(item) { + acc.add_type_alias(ctx, ta); } - None::<()> - }, - ); - } + } + None::<()> + }, + ); } _ => {} } diff --git a/crates/ide_completion/src/completions/qualified_path.rs b/crates/ide_completion/src/completions/qualified_path.rs index cf78f7c1adf4..d63aacbadbc9 100644 --- a/crates/ide_completion/src/completions/qualified_path.rs +++ b/crates/ide_completion/src/completions/qualified_path.rs @@ -138,11 +138,11 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon let traits_in_scope = ctx.scope.visible_traits(); ty.iterate_path_candidates( ctx.db, - krate, + &ctx.scope, &traits_in_scope, ctx.module, None, - |_ty, item| { + |item| { add_assoc_item(acc, ctx, item); None::<()> }, @@ -164,35 +164,33 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon } } hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_) => { - if let Some(krate) = ctx.krate { - let ty = match resolution { - hir::PathResolution::TypeParam(param) => param.ty(ctx.db), - hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), - _ => return, - }; - - if let Some(hir::Adt::Enum(e)) = ty.as_adt() { - add_enum_variants(acc, ctx, e); - } + let ty = match resolution { + hir::PathResolution::TypeParam(param) => param.ty(ctx.db), + hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), + _ => return, + }; - let traits_in_scope = ctx.scope.visible_traits(); - let mut seen = FxHashSet::default(); - ty.iterate_path_candidates( - ctx.db, - krate, - &traits_in_scope, - ctx.module, - None, - |_ty, item| { - // We might iterate candidates of a trait multiple times here, so deduplicate - // them. - if seen.insert(item) { - add_assoc_item(acc, ctx, item); - } - None::<()> - }, - ); + if let Some(hir::Adt::Enum(e)) = ty.as_adt() { + add_enum_variants(acc, ctx, e); } + + let traits_in_scope = ctx.scope.visible_traits(); + let mut seen = FxHashSet::default(); + ty.iterate_path_candidates( + ctx.db, + &ctx.scope, + &traits_in_scope, + ctx.module, + None, + |item| { + // We might iterate candidates of a trait multiple times here, so deduplicate + // them. 
+ if seen.insert(item) { + add_assoc_item(acc, ctx, item); + } + None::<()> + }, + ); } _ => {} } diff --git a/crates/ide_completion/src/config.rs b/crates/ide_completion/src/config.rs index 5e5c7efdfb92..c4e91e72830b 100644 --- a/crates/ide_completion/src/config.rs +++ b/crates/ide_completion/src/config.rs @@ -13,6 +13,7 @@ pub struct CompletionConfig { pub enable_postfix_completions: bool, pub enable_imports_on_the_fly: bool, pub enable_self_on_the_fly: bool, + pub enable_private_editable: bool, pub add_call_parenthesis: bool, pub add_call_argument_snippets: bool, pub snippet_cap: Option, diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs index e986c28b1466..c4e145ffcb55 100644 --- a/crates/ide_completion/src/context.rs +++ b/crates/ide_completion/src/context.rs @@ -360,6 +360,9 @@ impl<'a> CompletionContext<'a> { None => return Visible::No, }; if !vis.is_visible_from(self.db, module.into()) { + if !self.config.enable_private_editable { + return Visible::No; + } // If the definition location is editable, also show private items let root_file = defining_crate.root_file(self.db); let source_root_id = self.db.file_source_root(root_file); diff --git a/crates/ide_completion/src/tests.rs b/crates/ide_completion/src/tests.rs index f063a9638ca2..45c7c58dc03a 100644 --- a/crates/ide_completion/src/tests.rs +++ b/crates/ide_completion/src/tests.rs @@ -64,6 +64,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, + enable_private_editable: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs index 08104efcdc2f..08530f84fb92 100644 --- a/crates/ide_db/src/defs.rs +++ b/crates/ide_db/src/defs.rs @@ -206,8 +206,23 @@ impl NameClass { let parent = name.syntax().parent()?; - let def = if let Some(item) = ast::Item::cast(parent.clone()) { - match item { + let definition = match_ast! { + match parent { + ast::Item(it) => classify_item(sema, it)?, + ast::IdentPat(it) => return classify_ident_pat(sema, it), + ast::Rename(it) => classify_rename(sema, it)?, + ast::SelfParam(it) => Definition::Local(sema.to_def(&it)?), + ast::RecordField(it) => Definition::Field(sema.to_def(&it)?), + ast::Variant(it) => Definition::Variant(sema.to_def(&it)?), + ast::TypeParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), + ast::ConstParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), + _ => return None, + } + }; + return Some(NameClass::Definition(definition)); + + fn classify_item(sema: &Semantics, item: ast::Item) -> Option { + let definition = match item { ast::Item::MacroRules(it) => { Definition::Macro(sema.to_def(&ast::Macro::MacroRules(it))?) 
} @@ -229,14 +244,20 @@ impl NameClass { ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)), ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)), _ => return None, - } - } else if let Some(it) = ast::IdentPat::cast(parent.clone()) { - if let Some(def) = sema.resolve_bind_pat_to_const(&it) { + }; + Some(definition) + } + + fn classify_ident_pat( + sema: &Semantics, + ident_pat: ast::IdentPat, + ) -> Option { + if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) { return Some(NameClass::ConstReference(Definition::from(def))); } - let local = sema.to_def(&it)?; - let pat_parent = it.syntax().parent(); + let local = sema.to_def(&ident_pat)?; + let pat_parent = ident_pat.syntax().parent(); if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) { if record_pat_field.name_ref().is_none() { if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) { @@ -247,57 +268,23 @@ impl NameClass { } } } + Some(NameClass::Definition(Definition::Local(local))) + } - Definition::Local(local) - } else if let Some(it) = ast::Rename::cast(parent.clone()) { - if let Some(use_tree) = it.syntax().parent().and_then(ast::UseTree::cast) { + fn classify_rename( + sema: &Semantics, + rename: ast::Rename, + ) -> Option { + if let Some(use_tree) = rename.syntax().parent().and_then(ast::UseTree::cast) { let path = use_tree.path()?; - let path_segment = path.segment()?; - let name_ref = path_segment.name_ref()?; - let name_ref = if name_ref.self_token().is_some() { - use_tree - .syntax() - .parent() - .as_ref() - // Skip over UseTreeList - .and_then(|it| { - let use_tree = it.parent().and_then(ast::UseTree::cast)?; - let path = use_tree.path()?; - let path_segment = path.segment()?; - path_segment.name_ref() - }) - .unwrap_or(name_ref) - } else { - name_ref - }; - let name_ref_class = NameRefClass::classify(sema, &name_ref)?; - - match name_ref_class { - NameRefClass::Definition(def) => def, - NameRefClass::FieldShorthand { local_ref: _, field_ref } => { - Definition::Field(field_ref) - } - } + sema.resolve_path(&path).map(Definition::from) } else { - let extern_crate = it.syntax().parent().and_then(ast::ExternCrate::cast)?; + let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?; let krate = sema.resolve_extern_crate(&extern_crate)?; let root_module = krate.root_module(sema.db); - Definition::Module(root_module) - } - } else { - match_ast! { - match parent { - ast::SelfParam(it) => Definition::Local(sema.to_def(&it)?), - ast::RecordField(it) => Definition::Field(sema.to_def(&it)?), - ast::Variant(it) => Definition::Variant(sema.to_def(&it)?), - ast::TypeParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), - ast::ConstParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), - _ => return None, - } + Some(Definition::Module(root_module)) } - }; - - Some(NameClass::Definition(def)) + } } pub fn classify_lifetime( @@ -307,19 +294,14 @@ impl NameClass { let _p = profile::span("classify_lifetime").detail(|| lifetime.to_string()); let parent = lifetime.syntax().parent()?; - match_ast! 
{ - match parent { - ast::LifetimeParam(it) => { - let def = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::GenericParam(def.into()))) - }, - ast::Label(it) => { - let def = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::Label(def))) - }, - _ => None, - } + if let Some(it) = ast::LifetimeParam::cast(parent.clone()) { + sema.to_def(&it).map(Into::into).map(Definition::GenericParam) + } else if let Some(it) = ast::Label::cast(parent.clone()) { + sema.to_def(&it).map(Definition::Label) + } else { + None } + .map(NameClass::Definition) } } diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs index cbe4adf1b9c7..fcad17298483 100644 --- a/crates/ide_db/src/helpers.rs +++ b/crates/ide_db/src/helpers.rs @@ -16,7 +16,8 @@ use hir::{ItemInNs, MacroDef, ModuleDef, Name, Semantics}; use itertools::Itertools; use syntax::{ ast::{self, make, HasLoopBody}, - AstNode, AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, T, + AstNode, AstToken, Preorder, RustLanguage, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, + T, }; use crate::{defs::Definition, RootDatabase}; @@ -190,46 +191,102 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { } } -/// Calls `cb` on each break expr inside of `body` that is applicable for the given label. -pub fn for_each_break_expr( +pub fn for_each_break_and_continue_expr( + label: Option, + body: Option, + cb: &mut dyn FnMut(ast::Expr), +) { + let label = label.and_then(|lbl| lbl.lifetime()); + if let Some(b) = body { + let tree_depth_iterator = TreeWithDepthIterator::new(b); + for (expr, depth) in tree_depth_iterator { + match expr { + ast::Expr::BreakExpr(b) + if (depth == 0 && b.lifetime().is_none()) + || eq_label_lt(&label, &b.lifetime()) => + { + cb(ast::Expr::BreakExpr(b)); + } + ast::Expr::ContinueExpr(c) + if (depth == 0 && c.lifetime().is_none()) + || eq_label_lt(&label, &c.lifetime()) => + { + cb(ast::Expr::ContinueExpr(c)); + } + _ => (), + } + } + } +} + +fn for_each_break_expr( label: Option, body: Option, cb: &mut dyn FnMut(ast::BreakExpr), ) { let label = label.and_then(|lbl| lbl.lifetime()); - let mut depth = 0; if let Some(b) = body { - let preorder = &mut b.syntax().preorder(); - let ev_as_expr = |ev| match ev { - WalkEvent::Enter(it) => Some(WalkEvent::Enter(ast::Expr::cast(it)?)), - WalkEvent::Leave(it) => Some(WalkEvent::Leave(ast::Expr::cast(it)?)), - }; - let eq_label = |lt: Option| { - lt.zip(label.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text()) - }; - while let Some(node) = preorder.find_map(ev_as_expr) { - match node { - WalkEvent::Enter(expr) => match expr { - ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => { - depth += 1 - } - ast::Expr::BlockExpr(e) if e.label().is_some() => depth += 1, - ast::Expr::BreakExpr(b) - if (depth == 0 && b.lifetime().is_none()) || eq_label(b.lifetime()) => - { - cb(b); - } - _ => (), - }, - WalkEvent::Leave(expr) => match expr { - ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => { - depth -= 1 - } - ast::Expr::BlockExpr(e) if e.label().is_some() => depth -= 1, - _ => (), - }, + let tree_depth_iterator = TreeWithDepthIterator::new(b); + for (expr, depth) in tree_depth_iterator { + match expr { + ast::Expr::BreakExpr(b) + if (depth == 0 && b.lifetime().is_none()) + || eq_label_lt(&label, &b.lifetime()) => + { + cb(b); + } + _ => (), + } + } + } +} + +fn eq_label_lt(lt1: &Option, lt2: &Option) -> bool { + lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() 
== lbl.text()) +} + +struct TreeWithDepthIterator { + preorder: Preorder, + depth: u32, +} + +impl TreeWithDepthIterator { + fn new(body: ast::StmtList) -> Self { + let preorder = body.syntax().preorder(); + Self { preorder, depth: 0 } + } +} + +impl<'a> Iterator for TreeWithDepthIterator { + type Item = (ast::Expr, u32); + + fn next(&mut self) -> Option { + while let Some(event) = self.preorder.find_map(|ev| match ev { + WalkEvent::Enter(it) => ast::Expr::cast(it).map(WalkEvent::Enter), + WalkEvent::Leave(it) => ast::Expr::cast(it).map(WalkEvent::Leave), + }) { + match event { + WalkEvent::Enter( + ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_), + ) => { + self.depth += 1; + } + WalkEvent::Leave( + ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_), + ) => { + self.depth -= 1; + } + WalkEvent::Enter(ast::Expr::BlockExpr(e)) if e.label().is_some() => { + self.depth += 1; + } + WalkEvent::Leave(ast::Expr::BlockExpr(e)) if e.label().is_some() => { + self.depth -= 1; + } + WalkEvent::Enter(expr) => return Some((expr, self.depth)), + _ => (), } } + None } } diff --git a/crates/ide_db/src/helpers/import_assets.rs b/crates/ide_db/src/helpers/import_assets.rs index 319a21735297..d91627e0a921 100644 --- a/crates/ide_db/src/helpers/import_assets.rs +++ b/crates/ide_db/src/helpers/import_assets.rs @@ -1,7 +1,7 @@ //! Look up accessible paths for items. use hir::{ AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, MacroDef, ModPath, Module, - ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics, Type, + ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type, }; use itertools::Itertools; use rustc_hash::FxHashSet; @@ -239,7 +239,6 @@ impl ImportAssets { let _p = profile::span("import_assets::search_for"); let scope_definitions = self.scope_definitions(sema); - let current_crate = self.module_with_candidate.krate(); let mod_path = |item| { get_mod_path( sema.db, @@ -249,15 +248,18 @@ impl ImportAssets { ) }; + let krate = self.module_with_candidate.krate(); + let scope = sema.scope(&self.candidate_node); + match &self.import_candidate { ImportCandidate::Path(path_candidate) => { - path_applicable_imports(sema, current_crate, path_candidate, mod_path) + path_applicable_imports(sema, krate, path_candidate, mod_path) } ImportCandidate::TraitAssocItem(trait_candidate) => { - trait_applicable_items(sema, current_crate, trait_candidate, true, mod_path) + trait_applicable_items(sema, krate, &scope, trait_candidate, true, mod_path) } ImportCandidate::TraitMethod(trait_candidate) => { - trait_applicable_items(sema, current_crate, trait_candidate, false, mod_path) + trait_applicable_items(sema, krate, &scope, trait_candidate, false, mod_path) } } .into_iter() @@ -447,6 +449,7 @@ fn module_with_segment_name( fn trait_applicable_items( sema: &Semantics, current_crate: Crate, + scope: &SemanticsScope, trait_candidate: &TraitImportCandidate, trait_assoc_item: bool, mod_path: impl Fn(ItemInNs) -> Option, @@ -484,11 +487,11 @@ fn trait_applicable_items( if trait_assoc_item { trait_candidate.receiver_ty.iterate_path_candidates( db, - current_crate, + scope, &trait_candidates, None, None, - |_, assoc| { + |assoc| { if required_assoc_items.contains(&assoc) { if let AssocItem::Function(f) = assoc { if f.self_param(db).is_some() { @@ -511,11 +514,11 @@ fn trait_applicable_items( } else { trait_candidate.receiver_ty.iterate_method_candidates( db, - current_crate, + scope, &trait_candidates, None, None, - |_, function| { + |function| { 
let assoc = function.as_assoc_item(db)?; if required_assoc_items.contains(&assoc) { let located_trait = assoc.containing_trait(db)?; diff --git a/crates/ide_ssr/src/resolving.rs b/crates/ide_ssr/src/resolving.rs index 844b19779a87..e15cb874efbe 100644 --- a/crates/ide_ssr/src/resolving.rs +++ b/crates/ide_ssr/src/resolving.rs @@ -222,11 +222,11 @@ impl<'db> ResolutionScope<'db> { let module = self.scope.module()?; adt.ty(self.scope.db).iterate_path_candidates( self.scope.db, - module.krate(), + &self.scope, &self.scope.visible_traits(), Some(module), None, - |_ty, assoc_item| { + |assoc_item| { let item_name = assoc_item.name(self.scope.db)?; if item_name.to_smol_str().as_str() == name.text() { Some(hir::PathResolution::AssocItem(assoc_item)) diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 55a542c3c16d..f52e1e751278 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -10,7 +10,11 @@ use hir::{ db::{AstDatabase, DefDatabase, HirDatabase}, AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef, }; -use hir_def::{body::BodySourceMap, expr::ExprId, FunctionId}; +use hir_def::{ + body::{BodySourceMap, SyntheticSyntax}, + expr::ExprId, + FunctionId, +}; use hir_ty::{TyExt, TypeWalk}; use ide::{Analysis, AnalysisHost, LineCol, RootDatabase}; use ide_db::base_db::{ @@ -28,7 +32,7 @@ use syntax::{AstNode, SyntaxNode}; use vfs::{AbsPathBuf, Vfs, VfsPath}; use crate::cli::{ - flags, + flags::{self, OutputFormat}, load_cargo::{load_workspace, LoadCargoConfig}, print_memory_usage, progress_report::ProgressReport, @@ -191,7 +195,7 @@ impl flags::AnalysisStats { ) { let mut bar = match verbosity { Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(), - _ if self.parallel => ProgressReport::hidden(), + _ if self.parallel || self.output.is_some() => ProgressReport::hidden(), _ => ProgressReport::new(funcs.len() as u64), }; @@ -252,7 +256,7 @@ impl flags::AnalysisStats { for (expr_id, _) in body.exprs.iter() { let ty = &inference_result[expr_id]; num_exprs += 1; - if ty.is_unknown() { + let unknown_or_partial = if ty.is_unknown() { num_exprs_unknown += 1; if verbosity.is_spammy() { if let Some((path, start, end)) = @@ -270,6 +274,7 @@ impl flags::AnalysisStats { bar.println(format!("{}: Unknown type", name,)); } } + true } else { let mut is_partially_unknown = false; ty.walk(&mut |ty| { @@ -280,7 +285,8 @@ impl flags::AnalysisStats { if is_partially_unknown { num_exprs_partially_unknown += 1; } - } + is_partially_unknown + }; if self.only.is_some() && verbosity.is_spammy() { // in super-verbose mode for just one function, we print every single expression if let Some((_, start, end)) = @@ -298,6 +304,13 @@ impl flags::AnalysisStats { bar.println(format!("unknown location: {}", ty.display(db))); } } + if unknown_or_partial && self.output == Some(OutputFormat::Csv) { + println!( + r#"{},type,"{}""#, + location_csv(db, &analysis, vfs, &sm, expr_id), + ty.display(db) + ); + } if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { num_type_mismatches += 1; if verbosity.is_verbose() { @@ -323,6 +336,14 @@ impl flags::AnalysisStats { )); } } + if self.output == Some(OutputFormat::Csv) { + println!( + r#"{},mismatch,"{}","{}""#, + location_csv(db, &analysis, vfs, &sm, expr_id), + mismatch.expected.display(db), + mismatch.actual.display(db) + ); + } } } if verbosity.is_spammy() { @@ -358,6 +379,28 @@ impl flags::AnalysisStats { } } +fn location_csv( + 
db: &RootDatabase, + analysis: &Analysis, + vfs: &Vfs, + sm: &BodySourceMap, + expr_id: ExprId, +) -> String { + let src = match sm.expr_syntax(expr_id) { + Ok(s) => s, + Err(SyntheticSyntax) => return "synthetic,,".to_string(), + }; + let root = db.parse_or_expand(src.file_id).unwrap(); + let node = src.map(|e| e.to_node(&root).syntax().clone()); + let original_range = node.as_ref().original_file_range(db); + let path = vfs.file_path(original_range.file_id); + let line_index = analysis.file_line_index(original_range.file_id).unwrap(); + let text_range = original_range.range; + let (start, end) = + (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); + format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col) +} + fn expr_syntax_range( db: &RootDatabase, analysis: &Analysis, diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index b759d912c968..19907ebddb6a 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -1,6 +1,6 @@ //! Grammar for the command-line arguments. #![allow(unreachable_pub)] -use std::path::PathBuf; +use std::{path::PathBuf, str::FromStr}; use ide_ssr::{SsrPattern, SsrRule}; @@ -54,6 +54,8 @@ xflags::xflags! { /// Directory with Cargo.toml. required path: PathBuf { + optional --output format: OutputFormat + /// Randomize order in which crates, modules, and items are processed. optional --randomize /// Run type inference in parallel. @@ -160,6 +162,7 @@ pub struct Highlight { pub struct AnalysisStats { pub path: PathBuf, + pub output: Option, pub randomize: bool, pub parallel: bool, pub memory_usage: bool, @@ -215,6 +218,11 @@ impl RustAnalyzer { } // generated end +#[derive(Debug, PartialEq, Eq)] +pub enum OutputFormat { + Csv, +} + impl RustAnalyzer { pub fn verbosity(&self) -> Verbosity { if self.quiet { @@ -227,3 +235,14 @@ impl RustAnalyzer { } } } + +impl FromStr for OutputFormat { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "csv" => Ok(Self::Csv), + _ => Err(format!("unknown output format `{}`", s)), + } + } +} diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 76b72707974c..af779ee000d8 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -161,13 +161,15 @@ config_data! { } }"#, /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. - completion_postfix_enable: bool = "true", + completion_postfix_enable: bool = "true", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. completion_autoimport_enable: bool = "true", /// Toggles the additional completions that automatically show method calls and field accesses /// with `self` prefixed to them when inside a method. - completion_autoself_enable: bool = "true", + completion_autoself_enable: bool = "true", + /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. + completion_privateEditable_enable: bool = "false", /// Whether to show native rust-analyzer diagnostics. 
diagnostics_enable: bool = "true", @@ -875,6 +877,7 @@ impl Config { enable_imports_on_the_fly: self.data.completion_autoimport_enable && completion_item_edit_resolve(&self.caps), enable_self_on_the_fly: self.data.completion_autoself_enable, + enable_private_editable: self.data.completion_privateEditable_enable, add_call_parenthesis: self.data.completion_addCallParenthesis, add_call_argument_snippets: self.data.completion_addCallArgumentSnippets, insert_use: self.insert_use_config(), diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index c745af3da693..a910a910bb24 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -134,6 +134,7 @@ fn integrated_completion_benchmark() { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, + enable_private_editable: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), @@ -171,6 +172,7 @@ fn integrated_completion_benchmark() { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, + enable_private_editable: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 0f7855a053e1..e30f6cd79c3e 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -52,14 +52,15 @@ pub use crate::{ ptr::{AstPtr, SyntaxNodePtr}, syntax_error::SyntaxError, syntax_node::{ - PreorderWithTokens, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, - SyntaxToken, SyntaxTreeBuilder, + PreorderWithTokens, RustLanguage, SyntaxElement, SyntaxElementChildren, SyntaxNode, + SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder, }, token_text::TokenText, }; pub use parser::{SyntaxKind, T}; pub use rowan::{ - Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent, + api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, + TokenAtOffset, WalkEvent, }; pub use smol_str::SmolStr; diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index b10b0d355222..7e6c8225b184 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -213,6 +213,11 @@ Note that your client must specify the `additionalTextEdits` LSP client capabili Toggles the additional completions that automatically show method calls and field accesses with `self` prefixed to them when inside a method. -- +[[rust-analyzer.completion.privateEditable.enable]]rust-analyzer.completion.privateEditable.enable (default: `false`):: ++ +-- +Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. +-- [[rust-analyzer.diagnostics.enable]]rust-analyzer.diagnostics.enable (default: `true`):: + -- diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 0fc28466200a..3578c0d78a6c 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -100,8 +100,7 @@ It will ask your permission to download the matching language server version bin ===== Nightly We ship nightly releases for VS Code. -You can opt in to these by switching to the pre-release version in the Code extension page or settings. 
-To help us out with testing the newest code and follow the bleeding edge of our `master`, please use the following config: +To help us out by testing the newest code, you can enable pre-release versions in the Code extension page. ==== Manual installation @@ -437,6 +436,40 @@ Then click on apply, and restart the LSP server for your rust project. https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file. +=== Kakoune + +https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`]. +Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`. +To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default). + +Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]). +A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save. +The following might help you get all of this. + +[source,txt] +---- +eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak. +hook global WinSetOption filetype=rust %{ + # Enable LSP + lsp-enable-window + + # Auto-formatting on save + hook window BufWritePre .* lsp-formatting-sync + + # Configure inlay hints (only on save) + hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints + hook -once -always window WinSetOption filetype=.* %{ + remove-hooks window rust-inlay-hints + } +} +---- + +=== Helix + +https://docs.helix-editor.com/[Helix] supports LSP by default. +However, it won't install `rust-analyzer` automatically. +You can follow instructions for installing <>. + == Troubleshooting Start with looking at the rust-analyzer version. diff --git a/editors/code/package.json b/editors/code/package.json index 1c4f6d4a7ebe..5d2f323b6c2f 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -318,7 +318,7 @@ ], "scope": "machine-overridable", "default": null, - "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default). If this is set, then `#rust-analyzer.updates.channel#` setting is not used" + "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default)." 
}, "rust-analyzer.server.extraEnv": { "type": [ @@ -651,6 +651,11 @@ "default": true, "type": "boolean" }, + "rust-analyzer.completion.privateEditable.enable": { + "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.", + "default": false, + "type": "boolean" + }, "rust-analyzer.diagnostics.enable": { "markdownDescription": "Whether to show native rust-analyzer diagnostics.", "default": true, diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index 69b3cb9c1751..993c64cceaf0 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -113,9 +113,15 @@ pub struct Bb { impl Xtask { pub const HELP: &'static str = Self::HELP_; + #[allow(dead_code)] pub fn from_env() -> xflags::Result { Self::from_env_() } + + #[allow(dead_code)] + pub fn from_vec(args: Vec) -> xflags::Result { + Self::from_vec_(args) + } } // generated end