From ed1f467aabf03a451355b6e0de8de2f6a2e101ce Mon Sep 17 00:00:00 2001 From: jmviz Date: Fri, 17 Feb 2023 17:41:39 -0500 Subject: [PATCH 001/108] add openDocs to context menu. add further restrictions to context menu when clauses to prevent irrelevant commands in non-rust files --- editors/code/package.json | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/editors/code/package.json b/editors/code/package.json index 3610e993f8222..b206c15f34c29 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1869,8 +1869,13 @@ "editor/context": [ { "command": "rust-analyzer.peekTests", - "when": "inRustProject", + "when": "inRustProject && editorTextFocus && editorLangId == rust", "group": "navigation@1000" + }, + { + "command": "rust-analyzer.openDocs", + "when": "inRustProject && editorTextFocus && editorLangId == rust", + "group": "navigation@1001" } ] }, From 443801755c10f64aa3a5b400e8cdc026a7ba6e5e Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 19 Feb 2023 19:02:51 +0900 Subject: [PATCH 002/108] Refactor - Remove unnecessary references and derefs - Manual formatting --- crates/hir-def/src/lib.rs | 2 +- crates/hir-def/src/nameres.rs | 18 +++++++++--------- crates/hir-def/src/visibility.rs | 5 +++-- crates/hir/src/display.rs | 2 +- 4 files changed, 14 insertions(+), 13 deletions(-) diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index d07c5fb67c6f6..2aab1ccd914c1 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -128,7 +128,7 @@ impl ModuleId { } } -/// An ID of a module, **local** to a specific crate +/// An ID of a module, **local** to a `DefMap`. pub type LocalModuleId = Idx; #[derive(Debug)] diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 393747d304b77..a7ce0360516aa 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -342,7 +342,7 @@ impl DefMap { } pub(crate) fn block_id(&self) -> Option { - self.block.as_ref().map(|block| block.block) + self.block.map(|block| block.block) } pub(crate) fn prelude(&self) -> Option { @@ -354,7 +354,7 @@ impl DefMap { } pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId { - let block = self.block.as_ref().map(|b| b.block); + let block = self.block.map(|b| b.block); ModuleId { krate: self.krate, local_id, block } } @@ -432,9 +432,9 @@ impl DefMap { /// Returns the module containing `local_mod`, either the parent `mod`, or the module containing /// the block, if `self` corresponds to a block expression. 
pub fn containing_module(&self, local_mod: LocalModuleId) -> Option { - match &self[local_mod].parent { - Some(parent) => Some(self.module_id(*parent)), - None => self.block.as_ref().map(|block| block.parent), + match self[local_mod].parent { + Some(parent) => Some(self.module_id(parent)), + None => self.block.map(|block| block.parent), } } @@ -444,11 +444,11 @@ impl DefMap { let mut buf = String::new(); let mut arc; let mut current_map = self; - while let Some(block) = ¤t_map.block { + while let Some(block) = current_map.block { go(&mut buf, current_map, "block scope", current_map.root); buf.push('\n'); arc = block.parent.def_map(db); - current_map = &*arc; + current_map = &arc; } go(&mut buf, current_map, "crate", current_map.root); return buf; @@ -472,10 +472,10 @@ impl DefMap { let mut buf = String::new(); let mut arc; let mut current_map = self; - while let Some(block) = ¤t_map.block { + while let Some(block) = current_map.block { format_to!(buf, "{:?} in {:?}\n", block.block, block.parent); arc = block.parent.def_map(db); - current_map = &*arc; + current_map = &arc; } format_to!(buf, "crate scope\n"); diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index 087268a9ecee4..eee73b9f37387 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -11,7 +11,7 @@ use crate::{ nameres::DefMap, path::{ModPath, PathKind}, resolver::HasResolver, - ConstId, FunctionId, HasModule, LocalFieldId, ModuleId, VariantId, + ConstId, FunctionId, HasModule, LocalFieldId, LocalModuleId, ModuleId, VariantId, }; /// Visibility of an item, not yet resolved. @@ -142,7 +142,8 @@ impl Visibility { arc = to_module.def_map(db); &arc }; - let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none()); + let is_block_root = + to_module.block.is_some() && to_module_def_map[to_module.local_id].parent.is_none(); if is_block_root { to_module = to_module_def_map.containing_module(to_module.local_id).unwrap(); } diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 0d19420127f54..830d261d78695 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -50,7 +50,7 @@ impl HirDisplay for Function { let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty { TypeRef::Path(p) if p.is_self_type() => f.write_str("self"), - TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner,TypeRef::Path(p) if p.is_self_type()) => + TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) => { f.write_char('&')?; if let Some(lifetime) = lifetime { From 83e24fec98f1fbecb29ea34bfd2bc6e0f32d25aa Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 19 Feb 2023 23:30:49 +0900 Subject: [PATCH 003/108] Fix associated item visibility in block-local impls --- crates/hir-def/src/nameres/collector.rs | 12 +++--- crates/hir-def/src/nameres/path_resolution.rs | 4 +- crates/hir-def/src/resolver.rs | 4 +- crates/hir-def/src/visibility.rs | 2 +- .../src/handlers/private_assoc_item.rs | 38 +++++++++++++++++++ 5 files changed, 52 insertions(+), 8 deletions(-) diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 4b39a20d86c6e..e3704bf2164b0 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -666,8 +666,10 @@ impl DefCollector<'_> { macro_: Macro2Id, vis: &RawVisibility, ) { - let vis = - self.def_map.resolve_visibility(self.db, module_id, 
vis).unwrap_or(Visibility::Public); + let vis = self + .def_map + .resolve_visibility(self.db, module_id, vis, false) + .unwrap_or(Visibility::Public); self.def_map.modules[module_id].scope.declare(macro_.into()); self.update( module_id, @@ -831,7 +833,7 @@ impl DefCollector<'_> { let mut def = directive.status.namespaces(); let vis = self .def_map - .resolve_visibility(self.db, module_id, &directive.import.visibility) + .resolve_visibility(self.db, module_id, &directive.import.visibility, false) .unwrap_or(Visibility::Public); match import.kind { @@ -1547,7 +1549,7 @@ impl ModCollector<'_, '_> { }; let resolve_vis = |def_map: &DefMap, visibility| { def_map - .resolve_visibility(db, self.module_id, visibility) + .resolve_visibility(db, self.module_id, visibility, false) .unwrap_or(Visibility::Public) }; @@ -1823,7 +1825,7 @@ impl ModCollector<'_, '_> { ) -> LocalModuleId { let def_map = &mut self.def_collector.def_map; let vis = def_map - .resolve_visibility(self.def_collector.db, self.module_id, visibility) + .resolve_visibility(self.def_collector.db, self.module_id, visibility, false) .unwrap_or(Visibility::Public); let modules = &mut def_map.modules; let origin = match definition { diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs index 1d9d5cccded23..25478481dd0b3 100644 --- a/crates/hir-def/src/nameres/path_resolution.rs +++ b/crates/hir-def/src/nameres/path_resolution.rs @@ -78,6 +78,7 @@ impl DefMap { // pub(path) // ^^^^ this visibility: &RawVisibility, + within_impl: bool, ) -> Option { let mut vis = match visibility { RawVisibility::Module(path) => { @@ -102,7 +103,8 @@ impl DefMap { // `super` to its parent (etc.). However, visibilities must only refer to a module in the // DefMap they're written in, so we restrict them when that happens. if let Visibility::Module(m) = vis { - if self.block_id() != m.block { + // ...unless we're resolving visibility for an associated item in an impl. 
+ if self.block_id() != m.block && !within_impl { cov_mark::hit!(adjust_vis_in_block_def_map); vis = Visibility::Module(self.module_id(self.root())); tracing::debug!("visibility {:?} points outside DefMap, adjusting to {:?}", m, vis); diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 86958e3daea4d..0a44f65ad4a0e 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -214,10 +214,12 @@ impl Resolver { db: &dyn DefDatabase, visibility: &RawVisibility, ) -> Option { + let within_impl = + self.scopes().find(|scope| matches!(scope, Scope::ImplDefScope(_))).is_some(); match visibility { RawVisibility::Module(_) => { let (item_map, module) = self.item_scope(); - item_map.resolve_visibility(db, module, visibility) + item_map.resolve_visibility(db, module, visibility, within_impl) } RawVisibility::Public => Some(Visibility::Public), } diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index eee73b9f37387..c9fcaae56cf0c 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -120,7 +120,7 @@ impl Visibility { self, db: &dyn DefDatabase, def_map: &DefMap, - mut from_module: crate::LocalModuleId, + mut from_module: LocalModuleId, ) -> bool { let mut to_module = match self { Visibility::Module(m) => m, diff --git a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs index 0b3121c765d8d..67da5c7f27d14 100644 --- a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs @@ -115,6 +115,44 @@ mod module { fn main(s: module::Struct) { s.method(); } +"#, + ); + } + + #[test] + fn can_see_through_top_level_anonymous_const() { + // regression test for #14046. + check_diagnostics( + r#" +struct S; +mod m { + const _: () = { + impl crate::S { + pub(crate) fn method(self) {} + pub(crate) const A: usize = 42; + } + }; + mod inner { + const _: () = { + impl crate::S { + pub(crate) fn method2(self) {} + pub(crate) const B: usize = 42; + pub(super) fn private(self) {} + pub(super) const PRIVATE: usize = 42; + } + }; + } +} +fn main() { + S.method(); + S::A; + S.method2(); + S::B; + S.private(); + //^^^^^^^^^^^ error: function `private` is private + S::PRIVATE; + //^^^^^^^^^^ error: const `PRIVATE` is private +} "#, ); } From 2351875e6a2e2f30787624002440ea9690b7cc12 Mon Sep 17 00:00:00 2001 From: jmviz Date: Sun, 19 Feb 2023 10:12:44 -0500 Subject: [PATCH 004/108] change titles of commands in context menu to title case. 
shorten open docs command --- editors/code/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/editors/code/package.json b/editors/code/package.json index b206c15f34c29..243208c4962b5 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -226,7 +226,7 @@ }, { "command": "rust-analyzer.openDocs", - "title": "Open docs under cursor", + "title": "Open Docs", "category": "rust-analyzer" }, { @@ -236,7 +236,7 @@ }, { "command": "rust-analyzer.peekTests", - "title": "Peek related tests", + "title": "Peek Related Tests", "category": "rust-analyzer" }, { From d4166234ef54a1019fe200adb414d0580133cd69 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 19 Feb 2023 23:32:24 +0900 Subject: [PATCH 005/108] Adjust block-local impl item visibility rendering --- crates/hir/src/display.rs | 31 ++++-- crates/hir/src/lib.rs | 16 ++- crates/ide/src/hover/tests.rs | 178 ++++++++++++++++++++++++++++++++++ 3 files changed, 216 insertions(+), 9 deletions(-) diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 830d261d78695..66bf2a2900e87 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -17,15 +17,23 @@ use hir_ty::{ }; use crate::{ - Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility, - LifetimeParam, Macro, Module, Static, Struct, Trait, TyBuilder, Type, TypeAlias, - TypeOrConstParam, TypeParam, Union, Variant, + Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, Field, Function, GenericParam, + HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct, Trait, TyBuilder, Type, + TypeAlias, TypeOrConstParam, TypeParam, Union, Variant, }; impl HirDisplay for Function { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - let data = f.db.function_data(self.id); - write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; + let db = f.db; + let data = db.function_data(self.id); + let container = self.as_assoc_item(db).map(|it| it.container(db)); + let mut module = self.module(db); + if let Some(AssocItemContainer::Impl(_)) = container { + // Block-local impls are "hoisted" to the nearest (non-block) module. + module = module.nearest_non_block_module(db); + } + let module_id = module.id; + write_visibility(module_id, self.visibility(db), f)?; if data.has_default_kw() { f.write_str("default ")?; } @@ -35,7 +43,7 @@ impl HirDisplay for Function { if data.has_async_kw() { f.write_str("async ")?; } - if self.is_unsafe_to_call(f.db) { + if self.is_unsafe_to_call(db) { f.write_str("unsafe ")?; } if let Some(abi) = &data.abi { @@ -442,8 +450,15 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), impl HirDisplay for Const { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; - let data = f.db.const_data(self.id); + let db = f.db; + let container = self.as_assoc_item(db).map(|it| it.container(db)); + let mut module = self.module(db); + if let Some(AssocItemContainer::Impl(_)) = container { + // Block-local impls are "hoisted" to the nearest (non-block) module. 
+ module = module.nearest_non_block_module(db); + } + write_visibility(module.id, self.visibility(db), f)?; + let data = db.const_data(self.id); f.write_str("const ")?; match &data.name { Some(name) => write!(f, "{name}: ")?, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 2cb4ed2c33518..4db0e20098c34 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -46,7 +46,7 @@ use hir_def::{ item_tree::ItemTreeNode, lang_item::{LangItem, LangItemTarget}, layout::{Layout, LayoutError, ReprOptions}, - nameres::{self, diagnostics::DefDiagnostic}, + nameres::{self, diagnostics::DefDiagnostic, ModuleOrigin}, per_ns::PerNs, resolver::{HasResolver, Resolver}, src::HasSource as _, @@ -488,6 +488,20 @@ impl Module { Some(Module { id: def_map.module_id(parent_id) }) } + /// Finds nearest non-block ancestor `Module` (`self` included). + fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module { + let mut id = self.id; + loop { + let def_map = id.def_map(db.upcast()); + let origin = def_map[id.local_id].origin; + if matches!(origin, ModuleOrigin::BlockExpr { .. }) { + id = id.containing_module(db.upcast()).expect("block without parent module") + } else { + return Module { id }; + } + } + } + pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec { let mut res = vec![self]; let mut curr = self; diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index bd7ce2f1d0d07..c199d1040af7e 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -5647,3 +5647,181 @@ fn main() { "#]], ); } + +#[test] +fn assoc_fn_in_block_local_impl() { + check( + r#" +struct S; +mod m { + const _: () = { + impl crate::S { + pub(crate) fn foo() {} + } + }; +} +fn test() { + S::foo$0(); +} +"#, + expect![[r#" + *foo* + + ```rust + test::S + ``` + + ```rust + pub(crate) fn foo() + ``` + "#]], + ); + + check( + r#" +struct S; +mod m { + const _: () = { + const _: () = { + impl crate::S { + pub(crate) fn foo() {} + } + }; + }; +} +fn test() { + S::foo$0(); +} +"#, + expect![[r#" + *foo* + + ```rust + test::S + ``` + + ```rust + pub(crate) fn foo() + ``` + "#]], + ); + + check( + r#" +struct S; +mod m { + mod inner { + const _: () = { + impl crate::S { + pub(super) fn foo() {} + } + }; + } + + fn test() { + crate::S::foo$0(); + } +} +"#, + expect![[r#" + *foo* + + ```rust + test::S + ``` + + ```rust + pub(super) fn foo() + ``` + "#]], + ); +} + +#[test] +fn assoc_const_in_block_local_impl() { + check( + r#" +struct S; +mod m { + const _: () = { + impl crate::S { + pub(crate) const A: () = (); + } + }; +} +fn test() { + S::A$0; +} +"#, + expect![[r#" + *A* + + ```rust + test + ``` + + ```rust + pub(crate) const A: () = () + ``` + "#]], + ); + + check( + r#" +struct S; +mod m { + const _: () = { + const _: () = { + impl crate::S { + pub(crate) const A: () = (); + } + }; + }; +} +fn test() { + S::A$0; +} +"#, + expect![[r#" + *A* + + ```rust + test + ``` + + ```rust + pub(crate) const A: () = () + ``` + "#]], + ); + + check( + r#" +struct S; +mod m { + mod inner { + const _: () = { + impl crate::S { + pub(super) const A: () = (); + } + }; + } + + fn test() { + crate::S::A$0; + } +} +"#, + expect![[r#" + *A* + + ```rust + test + ``` + + ```rust + pub(super) const A: () = () + ``` + "#]], + ); +} From 7e711da2f07778d62f6411de5da520f1e260d761 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 20 Feb 2023 10:14:12 +0200 Subject: [PATCH 006/108] :arrow_up: rust-analyzer --- Cargo.lock | 6 +- Cargo.toml | 2 + crates/hir-def/Cargo.toml | 2 
+- crates/hir-def/src/adt.rs | 5 +- crates/hir-def/src/body.rs | 140 ++++++---- crates/hir-def/src/body/lower.rs | 6 + crates/hir-def/src/body/tests.rs | 13 + crates/hir-def/src/item_tree/lower.rs | 9 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 2 +- .../macro_expansion_tests/mbe/regression.rs | 21 +- crates/hir-def/src/path.rs | 62 +++-- crates/hir-def/src/path/lower.rs | 84 ++++-- crates/hir-def/src/pretty.rs | 2 +- crates/hir-def/src/type_ref.rs | 6 +- crates/hir-expand/Cargo.toml | 2 +- crates/hir-expand/src/lib.rs | 4 + crates/hir-ty/Cargo.toml | 2 +- crates/hir-ty/src/display.rs | 6 +- crates/hir-ty/src/lower.rs | 4 +- crates/hir/Cargo.toml | 2 +- crates/ide-assists/Cargo.toml | 2 +- .../src/handlers/generate_getter.rs | 107 +++++++- crates/ide-assists/src/tests.rs | 42 ++- crates/ide-completion/Cargo.toml | 2 +- .../ide-completion/src/completions/postfix.rs | 12 + crates/ide-completion/src/context.rs | 37 ++- crates/ide-completion/src/context/analysis.rs | 160 ++++++----- crates/ide-completion/src/item.rs | 104 +++----- crates/ide-completion/src/lib.rs | 18 +- crates/ide-completion/src/render.rs | 27 +- crates/ide-completion/src/render/literal.rs | 2 +- .../src/render/union_literal.rs | 2 +- crates/ide-completion/src/tests.rs | 45 ++-- crates/ide-completion/src/tests/expression.rs | 252 ++++++++++++++++++ crates/ide-completion/src/tests/special.rs | 81 +++--- crates/ide-db/Cargo.toml | 3 +- crates/ide-db/src/active_parameter.rs | 80 +++++- crates/ide-db/src/line_index.rs | 195 +++++++------- crates/ide-db/src/search.rs | 82 +++--- crates/ide/Cargo.toml | 2 +- crates/ide/src/hover.rs | 17 ++ crates/ide/src/hover/tests.rs | 35 +++ crates/ide/src/inlay_hints/binding_mode.rs | 29 +- crates/ide/src/lib.rs | 2 +- crates/ide/src/references.rs | 15 ++ crates/ide/src/rename.rs | 5 +- crates/ide/src/shuffle_crate_graph.rs | 22 +- crates/ide/src/signature_help.rs | 124 ++++----- crates/ide/src/syntax_highlighting/tests.rs | 2 +- crates/mbe/Cargo.toml | 2 +- crates/parser/Cargo.toml | 1 + crates/parser/src/grammar.rs | 30 +++ crates/parser/src/grammar/attributes.rs | 2 + crates/parser/src/grammar/expressions.rs | 39 ++- crates/parser/src/grammar/expressions/atom.rs | 30 ++- crates/parser/src/grammar/generic_args.rs | 32 ++- crates/parser/src/grammar/generic_params.rs | 29 +- crates/parser/src/grammar/items/adt.rs | 25 +- crates/parser/src/grammar/params.rs | 14 +- crates/parser/src/grammar/paths.rs | 11 +- crates/parser/src/grammar/types.rs | 3 +- crates/parser/src/tests.rs | 6 + crates/parser/src/tests/top_entries.rs | 2 +- .../0009_broken_struct_type_parameter.rast | 3 +- .../parser/err/0013_invalid_type.rast | 38 ++- .../test_data/parser/err/0022_bad_exprs.rast | 18 +- .../parser/err/0024_many_type_parens.rast | 211 +++++++-------- .../test_data/parser/err/0025_nope.rast | 7 +- .../parser/err/0042_weird_blocks.rast | 2 +- .../parser/err/0048_double_fish.rast | 19 +- .../inline/err/0002_misplaced_label_err.rast | 2 +- .../inline/err/0015_arg_list_recovery.rast | 77 ++++++ .../inline/err/0015_arg_list_recovery.rs | 5 + .../err/0015_missing_fn_param_type.rast | 2 +- crates/proc-macro-api/src/version.rs | 13 +- crates/rust-analyzer/Cargo.toml | 2 +- .../default_12483297303756020505_0.profraw | Bin 0 -> 25152 bytes crates/rust-analyzer/src/caps.rs | 17 +- crates/rust-analyzer/src/cli/lsif.rs | 5 +- crates/rust-analyzer/src/config.rs | 8 +- .../rust-analyzer/src/diagnostics/to_proto.rs | 4 +- crates/rust-analyzer/src/from_proto.rs | 13 +- crates/rust-analyzer/src/line_index.rs | 5 +- 
crates/rust-analyzer/src/lsp_ext.rs | 27 +- crates/rust-analyzer/src/lsp_utils.rs | 38 ++- crates/rust-analyzer/src/main_loop.rs | 1 + crates/rust-analyzer/src/reload.rs | 5 +- crates/rust-analyzer/src/to_proto.rs | 66 ++--- crates/rust-analyzer/tests/slow-tests/main.rs | 4 +- .../rust-analyzer/tests/slow-tests/support.rs | 1 + crates/stdx/src/lib.rs | 1 + crates/stdx/src/rand.rs | 21 ++ crates/syntax/src/lib.rs | 2 +- docs/dev/architecture.md | 2 +- docs/dev/lsp-extensions.md | 2 +- editors/code/src/commands.ts | 6 + editors/code/src/main.ts | 1 + lib/lsp-server/Cargo.toml | 2 +- 98 files changed, 1800 insertions(+), 942 deletions(-) create mode 100644 crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs create mode 100644 crates/rust-analyzer/default_12483297303756020505_0.profraw create mode 100644 crates/stdx/src/rand.rs diff --git a/Cargo.lock b/Cargo.lock index ef0316f30fb93..ec197767259d4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -711,6 +711,7 @@ dependencies = [ "limit", "memchr", "once_cell", + "oorandom", "parser", "profile", "rayon", @@ -932,9 +933,9 @@ dependencies = [ [[package]] name = "lsp-types" -version = "0.93.2" +version = "0.94.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51" +checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" dependencies = [ "bitflags", "serde", @@ -1173,6 +1174,7 @@ dependencies = [ "limit", "rustc-ap-rustc_lexer", "sourcegen", + "stdx", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index ef81105505b03..333f03ce2ffe5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -74,3 +74,5 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } +# non-local crates +smallvec = { version = "1.10.0", features = ["const_new", "union", "const_generics"] } diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 1daf0428c242f..31d4018d2b6ab 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -27,7 +27,7 @@ itertools = "0.10.5" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.17.0" rustc-hash = "1.1.0" -smallvec = "1.10.0" +smallvec.workspace = true tracing = "0.1.35" rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false } diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index dcea679567a53..9bc1c54a3c641 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -2,9 +2,10 @@ use std::sync::Arc; -use crate::tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}; use base_db::CrateId; +use cfg::CfgOptions; use either::Either; + use hir_expand::{ name::{AsName, Name}, HirFileId, InFile, @@ -24,12 +25,12 @@ use crate::{ src::HasChildSource, src::HasSource, trace::Trace, + tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}, type_ref::TypeRef, visibility::RawVisibility, EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId, VariantId, }; -use cfg::CfgOptions; /// Note that we use `StructData` for unions as well! 
#[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 9713256813eb6..8fd9255b8b130 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -19,7 +19,7 @@ use la_arena::{Arena, ArenaMap}; use limit::Limit; use profile::Count; use rustc_hash::FxHashMap; -use syntax::{ast, AstPtr, SyntaxNodePtr}; +use syntax::{ast, AstPtr, SyntaxNode, SyntaxNodePtr}; use crate::{ attr::Attrs, @@ -51,7 +51,8 @@ pub struct Expander { def_map: Arc, current_file_id: HirFileId, module: LocalModuleId, - recursion_limit: usize, + /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached. + recursion_depth: usize, } impl CfgExpander { @@ -84,7 +85,7 @@ impl Expander { def_map, current_file_id, module: module.local_id, - recursion_limit: 0, + recursion_depth: 0, } } @@ -93,31 +94,37 @@ impl Expander { db: &dyn DefDatabase, macro_call: ast::MacroCall, ) -> Result>, UnresolvedMacro> { - if self.recursion_limit(db).check(self.recursion_limit + 1).is_err() { - cov_mark::hit!(your_stack_belongs_to_me); - return Ok(ExpandResult::only_err(ExpandError::Other( - "reached recursion limit during macro expansion".into(), - ))); + let mut unresolved_macro_err = None; + + let result = self.within_limit(db, |this| { + let macro_call = InFile::new(this.current_file_id, ¯o_call); + + let resolver = + |path| this.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it)); + + let mut err = None; + let call_id = match macro_call.as_call_id_with_errors( + db, + this.def_map.krate(), + resolver, + &mut |e| { + err.get_or_insert(e); + }, + ) { + Ok(call_id) => call_id, + Err(resolve_err) => { + unresolved_macro_err = Some(resolve_err); + return ExpandResult { value: None, err: None }; + } + }; + ExpandResult { value: call_id.ok(), err } + }); + + if let Some(err) = unresolved_macro_err { + Err(err) + } else { + Ok(result) } - - let macro_call = InFile::new(self.current_file_id, ¯o_call); - - let resolver = - |path| self.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it)); - - let mut err = None; - let call_id = - macro_call.as_call_id_with_errors(db, self.def_map.krate(), resolver, &mut |e| { - err.get_or_insert(e); - })?; - let call_id = match call_id { - Ok(it) => it, - Err(_) => { - return Ok(ExpandResult { value: None, err }); - } - }; - - Ok(self.enter_expand_inner(db, call_id, err)) } pub fn enter_expand_id( @@ -125,15 +132,14 @@ impl Expander { db: &dyn DefDatabase, call_id: MacroCallId, ) -> ExpandResult> { - self.enter_expand_inner(db, call_id, None) + self.within_limit(db, |_this| ExpandResult::ok(Some(call_id))) } - fn enter_expand_inner( - &mut self, + fn enter_expand_inner( db: &dyn DefDatabase, call_id: MacroCallId, mut err: Option, - ) -> ExpandResult> { + ) -> ExpandResult> { if err.is_none() { err = db.macro_expand_error(call_id); } @@ -154,29 +160,21 @@ impl Expander { } }; - let node = match T::cast(raw_node) { - Some(it) => it, - None => { - // This can happen without being an error, so only forward previous errors. 
- return ExpandResult { value: None, err }; - } - }; - - tracing::debug!("macro expansion {:#?}", node.syntax()); - - self.recursion_limit += 1; - let mark = - Mark { file_id: self.current_file_id, bomb: DropBomb::new("expansion mark dropped") }; - self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id); - self.current_file_id = file_id; - - ExpandResult { value: Some((mark, node)), err } + ExpandResult { value: Some((file_id, raw_node)), err } } pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id); self.current_file_id = mark.file_id; - self.recursion_limit -= 1; + if self.recursion_depth == usize::MAX { + // Recursion limit has been reached somewhere in the macro expansion tree. Reset the + // depth only when we get out of the tree. + if !self.current_file_id.is_macro() { + self.recursion_depth = 0; + } + } else { + self.recursion_depth -= 1; + } mark.bomb.defuse(); } @@ -215,6 +213,50 @@ impl Expander { #[cfg(test)] return Limit::new(std::cmp::min(32, limit)); } + + fn within_limit( + &mut self, + db: &dyn DefDatabase, + op: F, + ) -> ExpandResult> + where + F: FnOnce(&mut Self) -> ExpandResult>, + { + if self.recursion_depth == usize::MAX { + // Recursion limit has been reached somewhere in the macro expansion tree. We should + // stop expanding other macro calls in this tree, or else this may result in + // exponential number of macro expansions, leading to a hang. + // + // The overflow error should have been reported when it occurred (see the next branch), + // so don't return overflow error here to avoid diagnostics duplication. + cov_mark::hit!(overflow_but_not_me); + return ExpandResult::only_err(ExpandError::RecursionOverflowPosioned); + } else if self.recursion_limit(db).check(self.recursion_depth + 1).is_err() { + self.recursion_depth = usize::MAX; + cov_mark::hit!(your_stack_belongs_to_me); + return ExpandResult::only_err(ExpandError::Other( + "reached recursion limit during macro expansion".into(), + )); + } + + let ExpandResult { value, err } = op(self); + let Some(call_id) = value else { + return ExpandResult { value: None, err }; + }; + + Self::enter_expand_inner(db, call_id, err).map(|value| { + value.and_then(|(new_file_id, node)| { + let node = T::cast(node)?; + + self.recursion_depth += 1; + self.cfg_expander.hygiene = Hygiene::new(db.upcast(), new_file_id); + let old_file_id = std::mem::replace(&mut self.current_file_id, new_file_id); + let mark = + Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") }; + Some((mark, node)) + }) + }) + } } #[derive(Debug)] diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index a78fa91f53bd0..04b1c4f01e22a 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -624,6 +624,10 @@ impl ExprCollector<'_> { krate: *krate, }); } + Some(ExpandError::RecursionOverflowPosioned) => { + // Recursion limit has been reached in the macro expansion tree, but not in + // this very macro call. Don't add diagnostics to avoid duplication. + } Some(err) => { self.source_map.diagnostics.push(BodyDiagnostic::MacroError { node: InFile::new(outer_file, syntax_ptr), @@ -636,6 +640,8 @@ impl ExprCollector<'_> { match res.value { Some((mark, expansion)) => { + // Keep collecting even with expansion errors so we can provide completions and + // other services in incomplete macro expressions. 
self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id); let prev_ast_id_map = mem::replace( &mut self.ast_id_map, diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs index c9601f8552737..edee2c7ff96bf 100644 --- a/crates/hir-def/src/body/tests.rs +++ b/crates/hir-def/src/body/tests.rs @@ -61,6 +61,19 @@ fn main() { n_nuple!(1,2,3); } ); } +#[test] +fn your_stack_belongs_to_me2() { + cov_mark::check!(overflow_but_not_me); + lower( + r#" +macro_rules! foo { + () => {{ foo!(); foo!(); }} +} +fn main() { foo!(); } +"#, + ); +} + #[test] fn recursion_limit() { cov_mark::check!(your_stack_belongs_to_me); diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 27705cbbbdc58..d4d3c5ef19a60 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -659,15 +659,16 @@ fn desugar_future_path(orig: TypeRef) -> Path { let path = path![core::future::Future]; let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments().len() - 1).collect(); - let mut last = GenericArgs::empty(); let binding = AssociatedTypeBinding { name: name![Output], args: None, type_ref: Some(orig), - bounds: Vec::new(), + bounds: Box::default(), }; - last.bindings.push(binding); - generic_args.push(Some(Interned::new(last))); + generic_args.push(Some(Interned::new(GenericArgs { + bindings: Box::new([binding]), + ..GenericArgs::empty() + }))); Path::from_known_path(path, generic_args) } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 49bbc64bff180..7a3e8c3b05c91 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1476,7 +1476,7 @@ macro_rules! m { /* parse error: expected identifier */ /* parse error: expected SEMICOLON */ /* parse error: expected SEMICOLON */ -/* parse error: expected expression */ +/* parse error: expected expression, item or let statement */ fn f() { K::(C("0")); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index d2505e7cafe53..8358a46f0a914 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -830,8 +830,7 @@ macro_rules! 
rgb_color { /* parse error: expected COMMA */ /* parse error: expected R_ANGLE */ /* parse error: expected SEMICOLON */ -/* parse error: expected SEMICOLON */ -/* parse error: expected expression */ +/* parse error: expected expression, item or let statement */ pub fn new() { let _ = 0as u32<<(8+8); } @@ -848,21 +847,21 @@ pub fn new() { // BLOCK_EXPR@10..31 // STMT_LIST@10..31 // L_CURLY@10..11 "{" -// LET_STMT@11..27 +// LET_STMT@11..28 // LET_KW@11..14 "let" // WILDCARD_PAT@14..15 // UNDERSCORE@14..15 "_" // EQ@15..16 "=" -// CAST_EXPR@16..27 +// CAST_EXPR@16..28 // LITERAL@16..17 // INT_NUMBER@16..17 "0" // AS_KW@17..19 "as" -// PATH_TYPE@19..27 -// PATH@19..27 -// PATH_SEGMENT@19..27 +// PATH_TYPE@19..28 +// PATH@19..28 +// PATH_SEGMENT@19..28 // NAME_REF@19..22 // IDENT@19..22 "u32" -// GENERIC_ARG_LIST@22..27 +// GENERIC_ARG_LIST@22..28 // L_ANGLE@22..23 "<" // TYPE_ARG@23..27 // DYN_TRAIT_TYPE@23..27 @@ -877,9 +876,9 @@ pub fn new() { // ERROR@25..26 // INT_NUMBER@25..26 "8" // PLUS@26..27 "+" -// EXPR_STMT@27..28 -// LITERAL@27..28 -// INT_NUMBER@27..28 "8" +// CONST_ARG@27..28 +// LITERAL@27..28 +// INT_NUMBER@27..28 "8" // ERROR@28..29 // R_PAREN@28..29 ")" // SEMICOLON@29..30 ";" diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs index 25a23fcd61a51..36d4c36a26894 100644 --- a/crates/hir-def/src/path.rs +++ b/crates/hir-def/src/path.rs @@ -38,18 +38,18 @@ impl Display for ImportAlias { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Path { /// Type based path like `::foo`. - /// Note that paths like `::foo` are desugard to `Trait::::foo`. + /// Note that paths like `::foo` are desugared to `Trait::::foo`. type_anchor: Option>, mod_path: Interned, - /// Invariant: the same len as `self.mod_path.segments` - generic_args: Box<[Option>]>, + /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`. + generic_args: Option>]>>, } /// Generic arguments to a path segment (e.g. the `i32` in `Option`). This /// also includes bindings of associated types, like in `Iterator`. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct GenericArgs { - pub args: Vec, + pub args: Box<[GenericArg]>, /// This specifies whether the args contain a Self type as the first /// element. This is the case for path segments like ``, where /// `T` is actually a type parameter for the path `Trait` specifying the @@ -57,7 +57,7 @@ pub struct GenericArgs { /// is left out. pub has_self_type: bool, /// Associated type bindings like in `Iterator`. - pub bindings: Vec, + pub bindings: Box<[AssociatedTypeBinding]>, /// Whether these generic args were desugared from `Trait(Arg) -> Output` /// parenthesis notation typically used for the `Fn` traits. pub desugared_from_fn: bool, @@ -77,7 +77,7 @@ pub struct AssociatedTypeBinding { /// Bounds for the associated type, like in `Iterator`. (This is the unstable `associated_type_bounds` /// feature.) - pub bounds: Vec>, + pub bounds: Box<[Interned]>, } /// A single generic argument. 
@@ -102,7 +102,7 @@ impl Path { ) -> Path { let generic_args = generic_args.into(); assert_eq!(path.len(), generic_args.len()); - Path { type_anchor: None, mod_path: Interned::new(path), generic_args } + Path { type_anchor: None, mod_path: Interned::new(path), generic_args: Some(generic_args) } } pub fn kind(&self) -> &PathKind { @@ -114,7 +114,14 @@ impl Path { } pub fn segments(&self) -> PathSegments<'_> { - PathSegments { segments: self.mod_path.segments(), generic_args: &self.generic_args } + let s = PathSegments { + segments: self.mod_path.segments(), + generic_args: self.generic_args.as_deref(), + }; + if let Some(generic_args) = s.generic_args { + assert_eq!(s.segments.len(), generic_args.len()); + } + s } pub fn mod_path(&self) -> &ModPath { @@ -131,13 +138,15 @@ impl Path { self.mod_path.kind, self.mod_path.segments()[..self.mod_path.segments().len() - 1].iter().cloned(), )), - generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec().into(), + generic_args: self.generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()), }; Some(res) } pub fn is_self_type(&self) -> bool { - self.type_anchor.is_none() && *self.generic_args == [None] && self.mod_path.is_Self() + self.type_anchor.is_none() + && self.generic_args.as_deref().is_none() + && self.mod_path.is_Self() } } @@ -149,11 +158,11 @@ pub struct PathSegment<'a> { pub struct PathSegments<'a> { segments: &'a [Name], - generic_args: &'a [Option>], + generic_args: Option<&'a [Option>]>, } impl<'a> PathSegments<'a> { - pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] }; + pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: None }; pub fn is_empty(&self) -> bool { self.len() == 0 } @@ -167,26 +176,29 @@ impl<'a> PathSegments<'a> { self.get(self.len().checked_sub(1)?) } pub fn get(&self, idx: usize) -> Option> { - assert_eq!(self.segments.len(), self.generic_args.len()); let res = PathSegment { name: self.segments.get(idx)?, - args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it), + args_and_bindings: self.generic_args.and_then(|it| it.get(idx)?.as_deref()), }; Some(res) } pub fn skip(&self, len: usize) -> PathSegments<'a> { - assert_eq!(self.segments.len(), self.generic_args.len()); - PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] 
} + PathSegments { + segments: &self.segments.get(len..).unwrap_or(&[]), + generic_args: self.generic_args.and_then(|it| it.get(len..)), + } } pub fn take(&self, len: usize) -> PathSegments<'a> { - assert_eq!(self.segments.len(), self.generic_args.len()); - PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] } + PathSegments { + segments: &self.segments.get(..len).unwrap_or(&self.segments), + generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)), + } } pub fn iter(&self) -> impl Iterator> { - self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment { - name, - args_and_bindings: args.as_ref().map(|it| &**it), - }) + self.segments + .iter() + .zip(self.generic_args.into_iter().flatten().chain(iter::repeat(&None))) + .map(|(name, args)| PathSegment { name, args_and_bindings: args.as_deref() }) } } @@ -200,9 +212,9 @@ impl GenericArgs { pub(crate) fn empty() -> GenericArgs { GenericArgs { - args: Vec::new(), + args: Box::default(), has_self_type: false, - bindings: Vec::new(), + bindings: Box::default(), desugared_from_fn: false, } } @@ -213,7 +225,7 @@ impl From for Path { Path { type_anchor: None, mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))), - generic_args: Box::new([None]), + generic_args: None, } } } diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index d570191595b68..c85a11db6d194 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -1,5 +1,7 @@ //! Transforms syntax into `Path` objects, ideally with accounting for hygiene +use std::iter; + use crate::type_ref::ConstScalarOrPath; use either::Either; @@ -45,8 +47,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { kind = PathKind::DollarCrate(crate_id); @@ -56,7 +61,6 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { segments.push(name![Self]); - generic_args.push(None) } ast::PathSegmentKind::Type { type_ref, trait_ref } => { assert!(path.qualifier().is_none()); // this can only occur at the first segment @@ -77,18 +81,33 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option it.as_ref().clone(), - None => GenericArgs::empty(), - }; - args_inner.has_self_type = true; - args_inner.args.insert(0, GenericArg::Type(self_type)); - *last_segment = Some(Interned::new(args_inner)); + let last_segment = generic_args.get_mut(segments.len() - num_segments)?; + *last_segment = Some(Interned::new(match last_segment.take() { + Some(it) => GenericArgs { + args: iter::once(self_type) + .chain(it.args.iter().cloned()) + .collect(), + + has_self_type: true, + bindings: it.bindings.clone(), + desugared_from_fn: it.desugared_from_fn, + }, + None => GenericArgs { + args: Box::new([self_type]), + has_self_type: true, + ..GenericArgs::empty() + }, + })); } } } @@ -115,7 +134,10 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option) -> Option Option { if let Some(q) = path.qualifier() { @@ -174,7 +200,7 @@ pub(super) fn lower_generic_args( .map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it))) .collect() } else { - Vec::new() + Box::default() }; bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds }); } @@ -195,7 +221,12 @@ pub(super) fn lower_generic_args( if args.is_empty() && bindings.is_empty() { return None; } - Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: false }) + Some(GenericArgs { + args: 
args.into_boxed_slice(), + has_self_type: false, + bindings: bindings.into_boxed_slice(), + desugared_from_fn: false, + }) } /// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y) @@ -205,33 +236,30 @@ fn lower_generic_args_from_fn_path( params: Option, ret_type: Option, ) -> Option { - let mut args = Vec::new(); - let mut bindings = Vec::new(); let params = params?; let mut param_types = Vec::new(); for param in params.params() { let type_ref = TypeRef::from_ast_opt(ctx, param.ty()); param_types.push(type_ref); } - let arg = GenericArg::Type(TypeRef::Tuple(param_types)); - args.push(arg); - if let Some(ret_type) = ret_type { + let args = Box::new([GenericArg::Type(TypeRef::Tuple(param_types))]); + let bindings = if let Some(ret_type) = ret_type { let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty()); - bindings.push(AssociatedTypeBinding { + Box::new([AssociatedTypeBinding { name: name![Output], args: None, type_ref: Some(type_ref), - bounds: Vec::new(), - }); + bounds: Box::default(), + }]) } else { // -> () let type_ref = TypeRef::Tuple(Vec::new()); - bindings.push(AssociatedTypeBinding { + Box::new([AssociatedTypeBinding { name: name![Output], args: None, type_ref: Some(type_ref), - bounds: Vec::new(), - }); - } + bounds: Box::default(), + }]) + }; Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: true }) } diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs index 1c0bd204d309b..2d45c8c8da1a5 100644 --- a/crates/hir-def/src/pretty.rs +++ b/crates/hir-def/src/pretty.rs @@ -71,7 +71,7 @@ pub(crate) fn print_generic_args(generics: &GenericArgs, buf: &mut dyn Write) -> first = false; print_generic_arg(arg, buf)?; } - for binding in &generics.bindings { + for binding in generics.bindings.iter() { if !first { write!(buf, ", ")?; } diff --git a/crates/hir-def/src/type_ref.rs b/crates/hir-def/src/type_ref.rs index 8fa12c7aafda7..9652b01b91bf6 100644 --- a/crates/hir-def/src/type_ref.rs +++ b/crates/hir-def/src/type_ref.rs @@ -292,7 +292,7 @@ impl TypeRef { } for segment in path.segments().iter() { if let Some(args_and_bindings) = segment.args_and_bindings { - for arg in &args_and_bindings.args { + for arg in args_and_bindings.args.iter() { match arg { crate::path::GenericArg::Type(type_ref) => { go(type_ref, f); @@ -301,11 +301,11 @@ impl TypeRef { | crate::path::GenericArg::Lifetime(_) => {} } } - for binding in &args_and_bindings.bindings { + for binding in args_and_bindings.bindings.iter() { if let Some(type_ref) = &binding.type_ref { go(type_ref, f); } - for bound in &binding.bounds { + for bound in binding.bounds.iter() { match bound.as_ref() { TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => { go_path(path, f) diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index 525cdc32b8751..5c684be03cf24 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -21,7 +21,7 @@ itertools = "0.10.5" hashbrown = { version = "0.12.1", features = [ "inline-more", ], default-features = false } -smallvec = { version = "1.10.0", features = ["const_new"] } +smallvec.workspace = true # local deps stdx.workspace = true diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index bc941b5417242..a52716cc02c25 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -55,6 +55,7 @@ pub type ExpandResult = ValueResult; pub enum ExpandError { UnresolvedProcMacro(CrateId), Mbe(mbe::ExpandError), + RecursionOverflowPosioned, Other(Box), } @@ -69,6 
+70,9 @@ impl fmt::Display for ExpandError { match self { ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"), ExpandError::Mbe(it) => it.fmt(f), + ExpandError::RecursionOverflowPosioned => { + f.write_str("overflow expanding the original macro") + } ExpandError::Other(it) => f.write_str(it), } } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 490bbe1e7240d..a8b8d5222e49c 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1" itertools = "0.10.5" arrayvec = "0.7.2" bitflags = "1.3.2" -smallvec = "1.10.0" +smallvec.workspace = true ena = "0.14.0" tracing = "0.1.35" rustc-hash = "1.1.0" diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 5fcbdf34f3cbb..b22064d8c42e4 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -1419,7 +1419,7 @@ impl HirDisplay for Path { write!(f, "<")?; let mut first = true; - for arg in &generic_args.args { + for arg in generic_args.args.iter() { if first { first = false; if generic_args.has_self_type { @@ -1431,7 +1431,7 @@ impl HirDisplay for Path { } arg.hir_fmt(f)?; } - for binding in &generic_args.bindings { + for binding in generic_args.bindings.iter() { if first { first = false; } else { @@ -1445,7 +1445,7 @@ impl HirDisplay for Path { } None => { write!(f, ": ")?; - f.write_joined(&binding.bounds, " + ")?; + f.write_joined(binding.bounds.iter(), " + ")?; } } } diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 7cce13a793e02..299646737221d 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -1025,7 +1025,7 @@ impl<'a> TyLoweringContext<'a> { last_segment .into_iter() .filter_map(|segment| segment.args_and_bindings) - .flat_map(|args_and_bindings| &args_and_bindings.bindings) + .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) .flat_map(move |binding| { let found = associated_type_by_name_including_super_traits( self.db, @@ -1068,7 +1068,7 @@ impl<'a> TyLoweringContext<'a> { AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty }; preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq))); } - for bound in &binding.bounds { + for bound in binding.bounds.iter() { preds.extend(self.lower_type_bound( bound, TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner), diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 32cde8a77325c..ef40a8902d73f 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -16,7 +16,7 @@ rustc-hash = "1.1.0" either = "1.7.0" arrayvec = "0.7.2" itertools = "0.10.5" -smallvec = "1.10.0" +smallvec.workspace = true once_cell = "1.17.0" # local deps diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml index 3954abfdb7c43..447e38f91f43e 100644 --- a/crates/ide-assists/Cargo.toml +++ b/crates/ide-assists/Cargo.toml @@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1" itertools = "0.10.5" either = "1.7.0" -smallvec = "1.10.0" +smallvec.workspace = true # local deps stdx.workspace = true diff --git a/crates/ide-assists/src/handlers/generate_getter.rs b/crates/ide-assists/src/handlers/generate_getter.rs index 15641b448d001..4595cfe29c85d 100644 --- a/crates/ide-assists/src/handlers/generate_getter.rs +++ b/crates/ide-assists/src/handlers/generate_getter.rs @@ -180,7 +180,9 @@ pub(crate) fn generate_getter_impl( // Insert `$0` only for last getter we generate if i == record_fields_count - 1 { - getter_buf = getter_buf.replacen("fn ", "fn $0", 1); + if 
ctx.config.snippet_cap.is_some() { + getter_buf = getter_buf.replacen("fn ", "fn $0", 1); + } } // For first element we do not merge with '\n', as @@ -330,7 +332,7 @@ fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option &Data { + &self.data + } +} +"#, + ); + + check_assist_no_snippet_cap( + generate_getter_mut, + r#" +struct Context { + dat$0a: Data, +} +"#, + r#" +struct Context { + data: Data, +} + +impl Context { + fn data_mut(&mut self) -> &mut Data { + &mut self.data + } +} +"#, + ); + } + #[test] fn test_generate_getter_already_implemented() { check_assist_not_applicable( @@ -433,6 +478,29 @@ impl Context { ); } + #[test] + fn test_generate_getter_from_field_with_visibility_marker_no_snippet_cap() { + check_assist_no_snippet_cap( + generate_getter, + r#" +pub(crate) struct Context { + dat$0a: Data, +} +"#, + r#" +pub(crate) struct Context { + data: Data, +} + +impl Context { + pub(crate) fn data(&self) -> &Data { + &self.data + } +} +"#, + ); + } + #[test] fn test_multiple_generate_getter() { check_assist( @@ -468,6 +536,41 @@ impl Context { ); } + #[test] + fn test_multiple_generate_getter_no_snippet_cap() { + check_assist_no_snippet_cap( + generate_getter, + r#" +struct Context { + data: Data, + cou$0nt: usize, +} + +impl Context { + fn data(&self) -> &Data { + &self.data + } +} +"#, + r#" +struct Context { + data: Data, + count: usize, +} + +impl Context { + fn data(&self) -> &Data { + &self.data + } + + fn count(&self) -> &usize { + &self.count + } +} +"#, + ); + } + #[test] fn test_not_a_special_case() { cov_mark::check_count!(convert_reference_type, 0); diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index fca268a1f0b2d..94be99fd7abf8 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -33,6 +33,20 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { assist_emit_must_use: false, }; +pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { + snippet_cap: None, + allowed: None, + insert_use: InsertUseConfig { + granularity: ImportGranularity::Crate, + prefix_kind: hir::PrefixKind::Plain, + enforce_granularity: true, + group: true, + skip_glob_imports: true, + }, + prefer_no_std: false, + assist_emit_must_use: false, +}; + pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { RootDatabase::with_single_file(text) } @@ -43,6 +57,22 @@ pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_ check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), None); } +#[track_caller] +pub(crate) fn check_assist_no_snippet_cap( + assist: Handler, + ra_fixture_before: &str, + ra_fixture_after: &str, +) { + let ra_fixture_after = trim_indent(ra_fixture_after); + check_with_config( + TEST_CONFIG_NO_SNIPPET_CAP, + assist, + ra_fixture_before, + ExpectedResult::After(&ra_fixture_after), + None, + ); +} + // There is no way to choose what assist within a group you want to test against, // so this is here to allow you choose. 
pub(crate) fn check_assist_by_label( @@ -119,6 +149,17 @@ enum ExpectedResult<'a> { #[track_caller] fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_label: Option<&str>) { + check_with_config(TEST_CONFIG, handler, before, expected, assist_label); +} + +#[track_caller] +fn check_with_config( + config: AssistConfig, + handler: Handler, + before: &str, + expected: ExpectedResult<'_>, + assist_label: Option<&str>, +) { let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); db.set_enable_proc_attr_macros(true); let text_without_caret = db.file_text(file_with_caret_id).to_string(); @@ -126,7 +167,6 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; let sema = Semantics::new(&db); - let config = TEST_CONFIG; let ctx = AssistContext::new(sema, &config, frange); let resolve = match expected { ExpectedResult::Unresolved => AssistResolveStrategy::None, diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml index 34ef092cfc44c..092fb303668fe 100644 --- a/crates/ide-completion/Cargo.toml +++ b/crates/ide-completion/Cargo.toml @@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1" itertools = "0.10.5" once_cell = "1.17.0" -smallvec = "1.10.0" +smallvec.workspace = true # local deps diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index 90c523735da80..c55bd9aaae521 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -747,4 +747,16 @@ fn main() { "#, ); } + + #[test] + fn no_postfix_completions_in_if_block_that_has_an_else() { + check( + r#" +fn test() { + if true {}.$0 else {} +} +"#, + expect![[r#""#]], + ); + } } diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index aa77f449530e5..ea54068b0f8bf 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -571,28 +571,25 @@ impl<'a> CompletionContext<'a> { // try to skip completions on path with invalid colons // this approach works in normal path and inside token tree - match original_token.kind() { - T![:] => { - // return if no prev token before colon - let prev_token = original_token.prev_token()?; - - // only has a single colon - if prev_token.kind() != T![:] { - return None; - } + if original_token.kind() == T![:] { + // return if no prev token before colon + let prev_token = original_token.prev_token()?; - // has 3 colon or 2 coloncolon in a row - // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205 - // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751 - if prev_token - .prev_token() - .map(|t| t.kind() == T![:] || t.kind() == T![::]) - .unwrap_or(false) - { - return None; - } + // only has a single colon + if prev_token.kind() != T![:] { + return None; + } + + // has 3 colon or 2 coloncolon in a row + // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205 + // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751 + if prev_token + .prev_token() + .map(|t| t.kind() == T![:] || t.kind() == T![::]) + .unwrap_or(false) + { + return None; } - _ => {} } let AnalysisResult { diff --git a/crates/ide-completion/src/context/analysis.rs 
b/crates/ide-completion/src/context/analysis.rs index 4bff665ab1d77..db0045aef6e0b 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -29,6 +29,7 @@ pub(super) struct AnalysisResult { pub(super) analysis: CompletionAnalysis, pub(super) expected: (Option, Option), pub(super) qualifier_ctx: QualifierCtx, + /// the original token of the expanded file pub(super) token: SyntaxToken, pub(super) offset: TextSize, } @@ -213,15 +214,6 @@ fn analyze( let _p = profile::span("CompletionContext::analyze"); let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } = expansion_result; - let syntax_element = NodeOrToken::Token(fake_ident_token); - if is_in_token_of_for_loop(syntax_element.clone()) { - // for pat $0 - // there is nothing to complete here except `in` keyword - // don't bother populating the context - // FIXME: the completion calculations should end up good enough - // such that this special case becomes unnecessary - return None; - } // Overwrite the path kind for derives if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx { @@ -249,37 +241,35 @@ fn analyze( return None; } - let name_like = match find_node_at_offset(&speculative_file, offset) { - Some(it) => it, - None => { - let analysis = if let Some(original) = ast::String::cast(original_token.clone()) { - CompletionAnalysis::String { - original, - expanded: ast::String::cast(self_token.clone()), + let Some(name_like) = find_node_at_offset(&speculative_file, offset) else { + let analysis = if let Some(original) = ast::String::cast(original_token.clone()) { + CompletionAnalysis::String { + original, + expanded: ast::String::cast(self_token.clone()), + } + } else { + // Fix up trailing whitespace problem + // #[attr(foo = $0 + let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?; + let p = token.parent()?; + if p.kind() == SyntaxKind::TOKEN_TREE + && p.ancestors().any(|it| it.kind() == SyntaxKind::META) + { + let colon_prefix = previous_non_trivia_token(self_token.clone()) + .map_or(false, |it| T![:] == it.kind()); + CompletionAnalysis::UnexpandedAttrTT { + fake_attribute_under_caret: fake_ident_token + .parent_ancestors() + .find_map(ast::Attr::cast), + colon_prefix, } } else { - // Fix up trailing whitespace problem - // #[attr(foo = $0 - let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?; - let p = token.parent()?; - if p.kind() == SyntaxKind::TOKEN_TREE - && p.ancestors().any(|it| it.kind() == SyntaxKind::META) - { - let colon_prefix = previous_non_trivia_token(self_token.clone()) - .map_or(false, |it| T![:] == it.kind()); - CompletionAnalysis::UnexpandedAttrTT { - fake_attribute_under_caret: syntax_element - .ancestors() - .find_map(ast::Attr::cast), - colon_prefix, - } - } else { - return None; - } - }; - return Some((analysis, (None, None), QualifierCtx::default())); - } + return None; + } + }; + return Some((analysis, (None, None), QualifierCtx::default())); }; + let expected = expected_type_and_name(sema, self_token, &name_like); let mut qual_ctx = QualifierCtx::default(); let analysis = match name_like { @@ -290,6 +280,22 @@ fn analyze( let parent = name_ref.syntax().parent()?; let (nameref_ctx, qualifier_ctx) = classify_name_ref(sema, &original_file, name_ref, parent)?; + + if let NameRefContext { + kind: + NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..), + .. 
+ } = &nameref_ctx + { + if is_in_token_of_for_loop(path) { + // for pat $0 + // there is nothing to complete here except `in` keyword + // don't bother populating the context + // Ideally this special casing wouldn't be needed, but the parser recovers + return None; + } + } + qual_ctx = qualifier_ctx; CompletionAnalysis::NameRef(nameref_ctx) } @@ -323,16 +329,14 @@ fn expected_type_and_name( ast::FieldExpr(e) => e .syntax() .ancestors() - .map_while(ast::FieldExpr::cast) - .last() - .map(|it| it.syntax().clone()), + .take_while(|it| ast::FieldExpr::can_cast(it.kind())) + .last(), ast::PathSegment(e) => e .syntax() .ancestors() .skip(1) .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind())) - .find_map(ast::PathExpr::cast) - .map(|it| it.syntax().clone()), + .find(|it| ast::PathExpr::can_cast(it.kind())), _ => None } }; @@ -605,6 +609,18 @@ fn classify_name_ref( }, _ => false, }; + + let reciever_is_part_of_indivisible_expression = match &receiver { + Some(ast::Expr::IfExpr(_)) => { + let next_token_kind = next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind()); + next_token_kind == Some(SyntaxKind::ELSE_KW) + }, + _ => false + }; + if reciever_is_part_of_indivisible_expression { + return None; + } + let kind = NameRefKind::DotAccess(DotAccess { receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal }, @@ -656,8 +672,15 @@ fn classify_name_ref( }; let after_if_expr = |node: SyntaxNode| { let prev_expr = (|| { + let node = match node.parent().and_then(ast::ExprStmt::cast) { + Some(stmt) => stmt.syntax().clone(), + None => node, + }; let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?; - ast::ExprStmt::cast(prev_sibling)?.expr() + + ast::ExprStmt::cast(prev_sibling.clone()) + .and_then(|it| it.expr()) + .or_else(|| ast::Expr::cast(prev_sibling)) })(); matches!(prev_expr, Some(ast::Expr::IfExpr(_))) }; @@ -1251,40 +1274,29 @@ fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> { Some((use_tree.path()?, true)) } -pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool { +fn is_in_token_of_for_loop(path: &ast::Path) -> bool { // oh my ... 
(|| { - let syntax_token = element.into_token()?; - let range = syntax_token.text_range(); - let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?; - - // check if the current token is the `in` token of a for loop - if let Some(token) = for_expr.in_token() { - return Some(syntax_token == token); + let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?; + let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?; + if for_expr.in_token().is_some() { + return Some(false); } let pat = for_expr.pat()?; - if range.end() < pat.syntax().text_range().end() { - // if we are inside or before the pattern we can't be at the `in` token position - return None; - } let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?; Some(match next_sibl { - // the loop body is some node, if our token is at the start we are at the `in` position, - // otherwise we could be in a recovered expression, we don't wanna ruin completions there - syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(), - // the loop body consists of a single token, if we are this we are certainly at the `in` token position - syntax::NodeOrToken::Token(t) => t == syntax_token, + syntax::NodeOrToken::Node(n) => { + n.text_range().start() == path.syntax().text_range().start() + } + syntax::NodeOrToken::Token(t) => { + t.text_range().start() == path.syntax().text_range().start() + } }) })() .unwrap_or(false) } -#[test] -fn test_for_is_prev2() { - crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop); -} - -pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool { +fn is_in_loop_body(node: &SyntaxNode) -> bool { node.ancestors() .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR) .find_map(|it| { @@ -1317,6 +1329,22 @@ fn previous_non_trivia_token(e: impl Into) -> Option None } +fn next_non_trivia_token(e: impl Into) -> Option { + let mut token = match e.into() { + SyntaxElement::Node(n) => n.last_token()?, + SyntaxElement::Token(t) => t, + } + .next_token(); + while let Some(inner) = token { + if !inner.kind().is_trivia() { + return Some(inner); + } else { + token = inner.next_token(); + } + } + None +} + fn next_non_trivia_sibling(ele: SyntaxElement) -> Option { let mut e = ele.next_sibling_or_token(); while let Some(inner) = e { diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 657eab5b1b830..2f65491d85e14 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -14,13 +14,14 @@ use crate::{ render::{render_path_resolution, RenderContext}, }; -/// `CompletionItem` describes a single completion variant in the editor pop-up. -/// It is basically a POD with various properties. To construct a -/// `CompletionItem`, use `new` method and the `Builder` struct. +/// `CompletionItem` describes a single completion entity which expands to 1 or more entries in the +/// editor pop-up. It is basically a POD with various properties. To construct a +/// [`CompletionItem`], use [`Builder::new`] method and the [`Builder`] struct. #[derive(Clone)] +#[non_exhaustive] pub struct CompletionItem { /// Label in the completion pop up which identifies completion. - label: SmolStr, + pub label: SmolStr, /// Range of identifier that is being completed. /// /// It should be used primarily for UI, but we also use this to convert @@ -29,33 +30,33 @@ pub struct CompletionItem { /// `source_range` must contain the completion offset. 
`text_edit` should /// start with what `source_range` points to, or VSCode will filter out the /// completion silently. - source_range: TextRange, + pub source_range: TextRange, /// What happens when user selects this item. /// /// Typically, replaces `source_range` with new identifier. - text_edit: TextEdit, - is_snippet: bool, + pub text_edit: TextEdit, + pub is_snippet: bool, /// What item (struct, function, etc) are we completing. - kind: CompletionItemKind, + pub kind: CompletionItemKind, /// Lookup is used to check if completion item indeed can complete current /// ident. /// /// That is, in `foo.bar$0` lookup of `abracadabra` will be accepted (it /// contains `bar` sub sequence), and `quux` will rejected. - lookup: Option, + pub lookup: Option, /// Additional info to show in the UI pop up. - detail: Option, - documentation: Option, + pub detail: Option, + pub documentation: Option, /// Whether this item is marked as deprecated - deprecated: bool, + pub deprecated: bool, /// If completing a function call, ask the editor to show parameter popup /// after completion. - trigger_call_info: bool, + pub trigger_call_info: bool, /// We use this to sort completion. Relevance records facts like "do the /// types align precisely?". We can't sort by relevances directly, they are @@ -64,36 +65,39 @@ pub struct CompletionItem { /// Note that Relevance ignores fuzzy match score. We compute Relevance for /// all possible items, and then separately build an ordered completion list /// based on relevance and fuzzy matching with the already typed identifier. - relevance: CompletionRelevance, + pub relevance: CompletionRelevance, /// Indicates that a reference or mutable reference to this variable is a /// possible match. - ref_match: Option<(Mutability, TextSize)>, + // FIXME: We shouldn't expose Mutability here (that is HIR types at all), its fine for now though + // until we have more splitting completions in which case we should think about + // generalizing this. See https://github.com/rust-lang/rust-analyzer/issues/12571 + pub ref_match: Option<(Mutability, TextSize)>, /// The import data to add to completion's edits. - import_to_add: SmallVec<[LocatedImport; 1]>, + pub import_to_add: SmallVec<[LocatedImport; 1]>, } // We use custom debug for CompletionItem to make snapshot tests more readable. impl fmt::Debug for CompletionItem { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut s = f.debug_struct("CompletionItem"); - s.field("label", &self.label()).field("source_range", &self.source_range()); - if self.text_edit().len() == 1 { - let atom = &self.text_edit().iter().next().unwrap(); + s.field("label", &self.label).field("source_range", &self.source_range); + if self.text_edit.len() == 1 { + let atom = &self.text_edit.iter().next().unwrap(); s.field("delete", &atom.delete); s.field("insert", &atom.insert); } else { s.field("text_edit", &self.text_edit); } - s.field("kind", &self.kind()); - if self.lookup() != self.label() { + s.field("kind", &self.kind); + if self.lookup() != self.label { s.field("lookup", &self.lookup()); } - if let Some(detail) = self.detail() { + if let Some(detail) = &self.detail { s.field("detail", &detail); } - if let Some(documentation) = self.documentation() { + if let Some(documentation) = &self.documentation { s.field("documentation", &documentation); } if self.deprecated { @@ -351,63 +355,25 @@ impl CompletionItem { } } - /// What user sees in pop-up in the UI. 
- pub fn label(&self) -> &str { - &self.label - } - pub fn source_range(&self) -> TextRange { - self.source_range - } - - pub fn text_edit(&self) -> &TextEdit { - &self.text_edit - } - /// Whether `text_edit` is a snippet (contains `$0` markers). - pub fn is_snippet(&self) -> bool { - self.is_snippet - } - - /// Short one-line additional information, like a type - pub fn detail(&self) -> Option<&str> { - self.detail.as_deref() - } - /// A doc-comment - pub fn documentation(&self) -> Option { - self.documentation.clone() - } /// What string is used for filtering. pub fn lookup(&self) -> &str { self.lookup.as_deref().unwrap_or(&self.label) } - pub fn kind(&self) -> CompletionItemKind { - self.kind - } - - pub fn deprecated(&self) -> bool { - self.deprecated - } - - pub fn relevance(&self) -> CompletionRelevance { - self.relevance - } - - pub fn trigger_call_info(&self) -> bool { - self.trigger_call_info - } - - pub fn ref_match(&self) -> Option<(Mutability, TextSize, CompletionRelevance)> { + pub fn ref_match(&self) -> Option<(String, text_edit::Indel, CompletionRelevance)> { // Relevance of the ref match should be the same as the original // match, but with exact type match set because self.ref_match // is only set if there is an exact type match. let mut relevance = self.relevance; relevance.type_match = Some(CompletionRelevanceTypeMatch::Exact); - self.ref_match.map(|(mutability, offset)| (mutability, offset, relevance)) - } - - pub fn imports_to_add(&self) -> &[LocatedImport] { - &self.import_to_add + self.ref_match.map(|(mutability, offset)| { + ( + format!("&{}{}", mutability.as_keyword_for_ref(), self.label), + text_edit::Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())), + relevance, + ) + }) } } diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 4b48ec6bc3393..6fe7811140399 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -156,13 +156,15 @@ pub fn completions( // prevent `(` from triggering unwanted completion noise if trigger_character == Some('(') { - if let CompletionAnalysis::NameRef(NameRefContext { kind, .. }) = &analysis { - if let NameRefKind::Path( - path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. }, - ) = kind - { - completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token); - } + if let CompletionAnalysis::NameRef(NameRefContext { + kind: + NameRefKind::Path( + path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. }, + ), + .. 
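The reworked `ref_match` no longer hands callers a bare `Mutability`: it returns the already-formatted `&`/`&mut ` label together with a `text_edit::Indel` that inserts the borrow at the recorded offset. A minimal standalone sketch of that construction, using a hypothetical `Indel` stand-in rather than the real `text_edit` type, and assuming the usual `""`/`"mut "` keyword mapping:

#[derive(Debug, PartialEq)]
struct Indel {
    // stand-in for text_edit::Indel::insert(offset, text)
    insert: String,
    offset: usize,
}

#[derive(Clone, Copy)]
enum Mutability {
    Shared,
    Mut,
}

impl Mutability {
    // keyword mapping assumed to match hir::Mutability::as_keyword_for_ref
    fn as_keyword_for_ref(self) -> &'static str {
        match self {
            Mutability::Shared => "",
            Mutability::Mut => "mut ",
        }
    }
}

/// Build the borrowed-completion label plus the edit that prepends the borrow
/// in front of the receiver, mirroring what `CompletionItem::ref_match` now returns.
fn ref_match(label: &str, mutability: Mutability, offset: usize) -> (String, Indel) {
    let prefix = format!("&{}", mutability.as_keyword_for_ref());
    (format!("{prefix}{label}"), Indel { insert: prefix, offset })
}

fn main() {
    let (label, edit) = ref_match("foo", Mutability::Mut, 10);
    assert_eq!(label, "&mut foo");
    assert_eq!(edit, Indel { insert: "&mut ".to_string(), offset: 10 });

    let (label, _) = ref_match("foo", Mutability::Shared, 10);
    assert_eq!(label, "&foo");
}

Callers such as the relevance tests can then use the returned label directly instead of re-deriving it from the mutability.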
+ }) = analysis + { + completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token); } return Some(completions.into()); } @@ -170,7 +172,7 @@ pub fn completions( { let acc = &mut completions; - match &analysis { + match analysis { CompletionAnalysis::Name(name_ctx) => completions::complete_name(acc, ctx, name_ctx), CompletionAnalysis::NameRef(name_ref_ctx) => { completions::complete_name_ref(acc, ctx, name_ref_ctx) diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index d6476c10258ec..d99ad5f9f04ba 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -503,18 +503,18 @@ mod tests { #[track_caller] fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) { let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None); - actual.retain(|it| kinds.contains(&it.kind())); - actual.sort_by_key(|it| cmp::Reverse(it.relevance().score())); + actual.retain(|it| kinds.contains(&it.kind)); + actual.sort_by_key(|it| cmp::Reverse(it.relevance.score())); check_relevance_(actual, expect); } #[track_caller] fn check_relevance(ra_fixture: &str, expect: Expect) { let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None); - actual.retain(|it| it.kind() != CompletionItemKind::Snippet); - actual.retain(|it| it.kind() != CompletionItemKind::Keyword); - actual.retain(|it| it.kind() != CompletionItemKind::BuiltinType); - actual.sort_by_key(|it| cmp::Reverse(it.relevance().score())); + actual.retain(|it| it.kind != CompletionItemKind::Snippet); + actual.retain(|it| it.kind != CompletionItemKind::Keyword); + actual.retain(|it| it.kind != CompletionItemKind::BuiltinType); + actual.sort_by_key(|it| cmp::Reverse(it.relevance.score())); check_relevance_(actual, expect); } @@ -525,12 +525,11 @@ mod tests { .flat_map(|it| { let mut items = vec![]; - let tag = it.kind().tag(); - let relevance = display_relevance(it.relevance()); - items.push(format!("{tag} {} {relevance}\n", it.label())); + let tag = it.kind.tag(); + let relevance = display_relevance(it.relevance); + items.push(format!("{tag} {} {relevance}\n", it.label)); - if let Some((mutability, _offset, relevance)) = it.ref_match() { - let label = format!("&{}{}", mutability.as_keyword_for_ref(), it.label()); + if let Some((label, _indel, relevance)) = it.ref_match() { let relevance = display_relevance(relevance); items.push(format!("{tag} {label} {relevance}\n")); @@ -587,6 +586,7 @@ fn main() { Foo::Fo$0 } ), lookup: "Foo{}", detail: "Foo { x: i32, y: i32 }", + trigger_call_info: true, }, ] "#]], @@ -614,6 +614,7 @@ fn main() { Foo::Fo$0 } ), lookup: "Foo()", detail: "Foo(i32, i32)", + trigger_call_info: true, }, ] "#]], @@ -679,6 +680,7 @@ fn main() { Foo::Fo$0 } Variant, ), detail: "Foo", + trigger_call_info: true, }, ] "#]], @@ -745,6 +747,7 @@ fn main() { let _: m::Spam = S$0 } postfix_match: None, is_definite: false, }, + trigger_call_info: true, }, CompletionItem { label: "m::Spam::Foo", @@ -770,6 +773,7 @@ fn main() { let _: m::Spam = S$0 } postfix_match: None, is_definite: false, }, + trigger_call_info: true, }, ] "#]], @@ -942,6 +946,7 @@ use self::E::*; documentation: Documentation( "variant docs", ), + trigger_call_info: true, }, CompletionItem { label: "E", diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs index 64dab02f7c5ce..ed78fcd8e6529 100644 --- a/crates/ide-completion/src/render/literal.rs +++ b/crates/ide-completion/src/render/literal.rs @@ -113,7 +113,7 @@ fn 
render( item.detail(rendered.detail); match snippet_cap { - Some(snippet_cap) => item.insert_snippet(snippet_cap, rendered.literal), + Some(snippet_cap) => item.insert_snippet(snippet_cap, rendered.literal).trigger_call_info(), None => item.insert_text(rendered.literal), }; diff --git a/crates/ide-completion/src/render/union_literal.rs b/crates/ide-completion/src/render/union_literal.rs index 1b09ad1731f9d..6e0c53ec94c43 100644 --- a/crates/ide-completion/src/render/union_literal.rs +++ b/crates/ide-completion/src/render/union_literal.rs @@ -72,7 +72,7 @@ pub(crate) fn render_union_literal( .set_relevance(ctx.completion_relevance()); match ctx.snippet_cap() { - Some(snippet_cap) => item.insert_snippet(snippet_cap, literal), + Some(snippet_cap) => item.insert_snippet(snippet_cap, literal).trigger_call_info(), None => item.insert_text(literal), }; diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 540b0fd0ef7d9..1fe48b9e96f9d 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -23,7 +23,7 @@ mod type_pos; mod use_tree; mod visibility; -use hir::{db::DefDatabase, PrefixKind, Semantics}; +use hir::{db::DefDatabase, PrefixKind}; use ide_db::{ base_db::{fixture::ChangeFixture, FileLoader, FilePosition}, imports::insert_use::{ImportGranularity, InsertUseConfig}, @@ -31,7 +31,6 @@ use ide_db::{ }; use itertools::Itertools; use stdx::{format_to, trim_indent}; -use syntax::{AstNode, NodeOrToken, SyntaxElement}; use test_utils::assert_eq_text; use crate::{ @@ -109,10 +108,10 @@ fn completion_list_with_config( let items = get_all_items(config, ra_fixture, trigger_character); let items = items .into_iter() - .filter(|it| it.kind() != CompletionItemKind::BuiltinType || it.label() == "u32") - .filter(|it| include_keywords || it.kind() != CompletionItemKind::Keyword) - .filter(|it| include_keywords || it.kind() != CompletionItemKind::Snippet) - .sorted_by_key(|it| (it.kind(), it.label().to_owned(), it.detail().map(ToOwned::to_owned))) + .filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label == "u32") + .filter(|it| include_keywords || it.kind != CompletionItemKind::Keyword) + .filter(|it| include_keywords || it.kind != CompletionItemKind::Snippet) + .sorted_by_key(|it| (it.kind, it.label.clone(), it.detail.as_ref().map(ToOwned::to_owned))) .collect(); render_completion_list(items) } @@ -139,8 +138,8 @@ pub(crate) fn do_completion_with_config( ) -> Vec { get_all_items(config, code, None) .into_iter() - .filter(|c| c.kind() == kind) - .sorted_by(|l, r| l.label().cmp(r.label())) + .filter(|c| c.kind == kind) + .sorted_by(|l, r| l.label.cmp(&r.label)) .collect() } @@ -149,18 +148,18 @@ fn render_completion_list(completions: Vec) -> String { s.chars().count() } let label_width = - completions.iter().map(|it| monospace_width(it.label())).max().unwrap_or_default().min(22); + completions.iter().map(|it| monospace_width(&it.label)).max().unwrap_or_default().min(22); completions .into_iter() .map(|it| { - let tag = it.kind().tag(); - let var_name = format!("{tag} {}", it.label()); + let tag = it.kind.tag(); + let var_name = format!("{tag} {}", it.label); let mut buf = var_name; - if let Some(detail) = it.detail() { - let width = label_width.saturating_sub(monospace_width(it.label())); + if let Some(detail) = it.detail { + let width = label_width.saturating_sub(monospace_width(&it.label)); format_to!(buf, "{:width$} {}", "", detail, width = width); } - if it.deprecated() { + if it.deprecated { format_to!(buf, " DEPRECATED"); 
} format_to!(buf, "\n"); @@ -192,13 +191,13 @@ pub(crate) fn check_edit_with_config( .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}")); let mut actual = db.file_text(position.file_id).to_string(); - let mut combined_edit = completion.text_edit().to_owned(); + let mut combined_edit = completion.text_edit.clone(); resolve_completion_edits( &db, &config, position, - completion.imports_to_add().iter().filter_map(|import_edit| { + completion.import_to_add.iter().filter_map(|import_edit| { let import_path = &import_edit.import_path; let import_name = import_path.segments().last()?; Some((import_path.to_string(), import_name.to_string())) @@ -216,15 +215,6 @@ pub(crate) fn check_edit_with_config( assert_eq_text!(&ra_fixture_after, &actual) } -pub(crate) fn check_pattern_is_applicable(code: &str, check: impl FnOnce(SyntaxElement) -> bool) { - let (db, pos) = position(code); - - let sema = Semantics::new(&db); - let original_file = sema.parse(pos.file_id); - let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap(); - assert!(check(NodeOrToken::Token(token))); -} - pub(crate) fn get_all_items( config: CompletionConfig, code: &str, @@ -235,7 +225,7 @@ pub(crate) fn get_all_items( .map_or_else(Vec::default, Into::into); // validate res.iter().for_each(|it| { - let sr = it.source_range(); + let sr = it.source_range; assert!( sr.contains_inclusive(position.offset), "source range {sr:?} does not contain the offset {:?} of the completion request: {it:?}", @@ -246,8 +236,9 @@ pub(crate) fn get_all_items( } #[test] -fn test_no_completions_required() { +fn test_no_completions_in_for_loop_in_kw_pos() { assert_eq!(completion_list(r#"fn foo() { for i i$0 }"#), String::new()); + assert_eq!(completion_list(r#"fn foo() { for i in$0 }"#), String::new()); } #[test] diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 043f552bd8a4c..c1c6a689eb18a 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -745,3 +745,255 @@ fn return_value_no_block() { r#"fn f() -> i32 { match () { () => return $0 } }"#, ); } + +#[test] +fn else_completion_after_if() { + check_empty( + r#" +fn foo() { if foo {} $0 } +"#, + expect![[r#" + fn foo() fn() + bt u32 + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check_empty( + r#" +fn foo() { if foo {} el$0 } +"#, + expect![[r#" + fn foo() fn() + bt u32 + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check_empty( + r#" +fn foo() { bar(if foo {} $0) } +"#, + expect![[r#" + fn foo() fn() + bt u32 + kw crate:: + kw else + kw else if + kw false + kw for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], + ); + check_empty( + r#" +fn foo() { bar(if foo {} el$0) } +"#, + expect![[r#" + fn foo() fn() + bt u32 + kw crate:: + kw else + kw else if + kw false + kw 
for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], + ); + check_empty( + r#" +fn foo() { if foo {} $0 let x = 92; } +"#, + expect![[r#" + fn foo() fn() + bt u32 + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check_empty( + r#" +fn foo() { if foo {} el$0 let x = 92; } +"#, + expect![[r#" + fn foo() fn() + bt u32 + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check_empty( + r#" +fn foo() { if foo {} el$0 { let x = 92; } } +"#, + expect![[r#" + fn foo() fn() + bt u32 + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); +} diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index 6052b0623204e..cb71c7b2bdef3 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -2,13 +2,20 @@ use expect_test::{expect, Expect}; -use crate::tests::{check_edit, completion_list_no_kw, completion_list_with_trigger_character}; +use crate::tests::{ + check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character, +}; -fn check(ra_fixture: &str, expect: Expect) { +fn check_no_kw(ra_fixture: &str, expect: Expect) { let actual = completion_list_no_kw(ra_fixture); expect.assert_eq(&actual) } +fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture); + expect.assert_eq(&actual) +} + pub(crate) fn check_with_trigger_character( ra_fixture: &str, trigger_character: Option, @@ -59,7 +66,7 @@ fn _alpha() {} #[test] fn completes_prelude() { - check( + check_no_kw( r#" //- /main.rs edition:2018 crate:main deps:std fn foo() { let x: $0 } @@ -81,7 +88,7 @@ pub mod prelude { #[test] fn completes_prelude_macros() { - check( + check_no_kw( r#" //- /main.rs edition:2018 crate:main deps:std fn f() {$0} @@ -110,7 +117,7 @@ mod macros { #[test] fn completes_std_prelude_if_core_is_defined() { - check( + check_no_kw( r#" //- /main.rs crate:main deps:core,std fn foo() { let x: $0 } @@ -140,7 +147,7 @@ pub mod prelude { #[test] fn respects_doc_hidden() { - check( + check_no_kw( r#" //- /lib.rs crate:lib deps:std fn f() { @@ -168,7 +175,7 @@ pub mod prelude { #[test] fn respects_doc_hidden_in_assoc_item_list() { - check( + check_no_kw( r#" //- /lib.rs crate:lib deps:std struct S; @@ -195,7 +202,7 @@ pub mod prelude { #[test] fn associated_item_visibility() { - check( + check_no_kw( r#" //- /lib.rs crate:lib new_source_root:library pub struct S; @@ -222,7 +229,7 @@ fn foo() { let _ = lib::S::$0 } #[test] fn completes_union_associated_method() { - check( + check_no_kw( r#" union U {}; impl U { fn m() { } } @@ -237,7 +244,7 
@@ fn foo() { let _ = U::$0 } #[test] fn completes_trait_associated_method_1() { - check( + check_no_kw( r#" trait Trait { fn m(); } @@ -251,7 +258,7 @@ fn foo() { let _ = Trait::$0 } #[test] fn completes_trait_associated_method_2() { - check( + check_no_kw( r#" trait Trait { fn m(); } @@ -268,7 +275,7 @@ fn foo() { let _ = S::$0 } #[test] fn completes_trait_associated_method_3() { - check( + check_no_kw( r#" trait Trait { fn m(); } @@ -285,7 +292,7 @@ fn foo() { let _ = ::$0 } #[test] fn completes_ty_param_assoc_ty() { - check( + check_no_kw( r#" trait Super { type Ty; @@ -318,7 +325,7 @@ fn foo() { T::$0 } #[test] fn completes_self_param_assoc_ty() { - check( + check_no_kw( r#" trait Super { type Ty; @@ -358,7 +365,7 @@ impl Sub for Wrap { #[test] fn completes_type_alias() { - check( + check_no_kw( r#" struct S; impl S { fn foo() {} } @@ -376,7 +383,7 @@ fn main() { T::$0; } #[test] fn completes_qualified_macros() { - check( + check_no_kw( r#" #[macro_export] macro_rules! foo { () => {} } @@ -392,7 +399,7 @@ fn main() { let _ = crate::$0 } #[test] fn does_not_complete_non_fn_macros() { - check( + check_no_kw( r#" mod m { #[rustc_builtin_macro] @@ -403,7 +410,7 @@ fn f() {m::$0} "#, expect![[r#""#]], ); - check( + check_no_kw( r#" mod m { #[rustc_builtin_macro] @@ -418,7 +425,7 @@ fn f() {m::$0} #[test] fn completes_reexported_items_under_correct_name() { - check( + check_no_kw( r#" fn foo() { self::m::$0 } @@ -475,7 +482,7 @@ mod p { #[test] fn completes_in_simple_macro_call() { - check( + check_no_kw( r#" macro_rules! m { ($e:expr) => { $e } } fn main() { m!(self::f$0); } @@ -490,7 +497,7 @@ fn foo() {} #[test] fn function_mod_share_name() { - check( + check_no_kw( r#" fn foo() { self::m::$0 } @@ -508,7 +515,7 @@ mod m { #[test] fn completes_hashmap_new() { - check( + check_no_kw( r#" struct RandomState; struct HashMap {} @@ -529,7 +536,7 @@ fn foo() { #[test] fn completes_variant_through_self() { cov_mark::check!(completes_variant_through_self); - check( + check_no_kw( r#" enum Foo { Bar, @@ -552,7 +559,7 @@ impl Foo { #[test] fn completes_non_exhaustive_variant_within_the_defining_crate() { - check( + check_no_kw( r#" enum Foo { #[non_exhaustive] @@ -570,7 +577,7 @@ fn foo(self) { "#]], ); - check( + check_no_kw( r#" //- /main.rs crate:main deps:e fn foo(self) { @@ -593,7 +600,7 @@ enum Foo { #[test] fn completes_primitive_assoc_const() { cov_mark::check!(completes_primitive_assoc_const); - check( + check_no_kw( r#" //- /lib.rs crate:lib deps:core fn f() { @@ -618,7 +625,7 @@ impl u8 { #[test] fn completes_variant_through_alias() { cov_mark::check!(completes_variant_through_alias); - check( + check_no_kw( r#" enum Foo { Bar @@ -636,7 +643,7 @@ fn main() { #[test] fn respects_doc_hidden2() { - check( + check_no_kw( r#" //- /lib.rs crate:lib deps:dep fn f() { @@ -665,7 +672,7 @@ pub mod m {} #[test] fn type_anchor_empty() { - check( + check_no_kw( r#" trait Foo { fn foo() -> Self; @@ -688,7 +695,7 @@ fn bar() -> Bar { #[test] fn type_anchor_type() { - check( + check_no_kw( r#" trait Foo { fn foo() -> Self; @@ -715,7 +722,7 @@ fn bar() -> Bar { #[test] fn type_anchor_type_trait() { - check( + check_no_kw( r#" trait Foo { fn foo() -> Self; @@ -741,7 +748,7 @@ fn bar() -> Bar { #[test] fn completes_fn_in_pub_trait_generated_by_macro() { - check( + check_no_kw( r#" mod other_mod { macro_rules! make_method { @@ -775,7 +782,7 @@ fn main() { #[test] fn completes_fn_in_pub_trait_generated_by_recursive_macro() { - check( + check_no_kw( r#" mod other_mod { macro_rules! 
make_method { @@ -815,7 +822,7 @@ fn main() { #[test] fn completes_const_in_pub_trait_generated_by_macro() { - check( + check_no_kw( r#" mod other_mod { macro_rules! make_const { @@ -847,7 +854,7 @@ fn main() { #[test] fn completes_locals_from_macros() { - check( + check_no_kw( r#" macro_rules! x { @@ -875,7 +882,7 @@ fn main() { #[test] fn regression_12644() { - check( + check_no_kw( r#" macro_rules! __rust_force_expr { ($e:expr) => { @@ -974,7 +981,7 @@ fn foo { crate:::$0 } "#, expect![""], ); - check( + check_no_kw( r#" fn foo { crate::::$0 } "#, diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml index 9672bb9b7b59a..57daaf623df29 100644 --- a/crates/ide-db/Cargo.toml +++ b/crates/ide-db/Cargo.toml @@ -37,8 +37,9 @@ text-edit.workspace = true hir.workspace = true [dev-dependencies] -xshell = "0.2.2" expect-test = "1.4.0" +oorandom = "11.1.3" +xshell = "0.2.2" # local deps test-utils.workspace = true diff --git a/crates/ide-db/src/active_parameter.rs b/crates/ide-db/src/active_parameter.rs index 7109c6fd188f5..2b6b60547b355 100644 --- a/crates/ide-db/src/active_parameter.rs +++ b/crates/ide-db/src/active_parameter.rs @@ -2,9 +2,10 @@ use either::Either; use hir::{Semantics, Type}; +use parser::T; use syntax::{ ast::{self, HasArgList, HasName}, - AstNode, SyntaxToken, + match_ast, AstNode, NodeOrToken, SyntaxToken, }; use crate::RootDatabase; @@ -58,7 +59,7 @@ pub fn callable_for_node( calling_node: &ast::CallableExpr, token: &SyntaxToken, ) -> Option<(hir::Callable, Option)> { - let callable = match &calling_node { + let callable = match calling_node { ast::CallableExpr::Call(call) => { let expr = call.expr()?; sema.type_of_expr(&expr)?.adjusted().as_callable(sema.db) @@ -66,13 +67,78 @@ pub fn callable_for_node( ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call), }?; let active_param = if let Some(arg_list) = calling_node.arg_list() { - let param = arg_list - .args() - .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start()) - .count(); - Some(param) + Some( + arg_list + .syntax() + .children_with_tokens() + .filter_map(NodeOrToken::into_token) + .filter(|t| t.kind() == T![,]) + .take_while(|t| t.text_range().start() <= token.text_range().start()) + .count(), + ) } else { None }; Some((callable, active_param)) } + +pub fn generic_def_for_node( + sema: &Semantics<'_, RootDatabase>, + generic_arg_list: &ast::GenericArgList, + token: &SyntaxToken, +) -> Option<(hir::GenericDef, usize, bool)> { + let parent = generic_arg_list.syntax().parent()?; + let def = match_ast! 
{ + match parent { + ast::PathSegment(ps) => { + let res = sema.resolve_path(&ps.parent_path())?; + let generic_def: hir::GenericDef = match res { + hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(), + hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(), + hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(), + hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(), + hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(), + hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)) + | hir::PathResolution::Def(hir::ModuleDef::Const(_)) + | hir::PathResolution::Def(hir::ModuleDef::Macro(_)) + | hir::PathResolution::Def(hir::ModuleDef::Module(_)) + | hir::PathResolution::Def(hir::ModuleDef::Static(_)) => return None, + hir::PathResolution::BuiltinAttr(_) + | hir::PathResolution::ToolModule(_) + | hir::PathResolution::Local(_) + | hir::PathResolution::TypeParam(_) + | hir::PathResolution::ConstParam(_) + | hir::PathResolution::SelfType(_) + | hir::PathResolution::DeriveHelper(_) => return None, + }; + + generic_def + }, + ast::AssocTypeArg(_) => { + // FIXME: We don't record the resolutions for this anywhere atm + return None; + }, + ast::MethodCallExpr(mcall) => { + // recv.method::<$0>() + let method = sema.resolve_method_call(&mcall)?; + method.into() + }, + _ => return None, + } + }; + + let active_param = generic_arg_list + .syntax() + .children_with_tokens() + .filter_map(NodeOrToken::into_token) + .filter(|t| t.kind() == T![,]) + .take_while(|t| t.text_range().start() <= token.text_range().start()) + .count(); + + let first_arg_is_non_lifetime = generic_arg_list + .generic_args() + .next() + .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_))); + + Some((def, active_param, first_arg_is_non_lifetime)) +} diff --git a/crates/ide-db/src/line_index.rs b/crates/ide-db/src/line_index.rs index 8f12ab334094a..16814a1e636fa 100644 --- a/crates/ide-db/src/line_index.rs +++ b/crates/ide-db/src/line_index.rs @@ -7,56 +7,72 @@ use syntax::{TextRange, TextSize}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct LineIndex { - /// Offset the the beginning of each line, zero-based + /// Offset the beginning of each line, zero-based. pub(crate) newlines: Vec, - /// List of non-ASCII characters on each line - pub(crate) utf16_lines: NoHashHashMap>, + /// List of non-ASCII characters on each line. + pub(crate) line_wide_chars: NoHashHashMap>, } +/// Line/Column information in native, utf8 format. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct LineColUtf16 { +pub struct LineCol { /// Zero-based pub line: u32, - /// Zero-based + /// Zero-based utf8 offset pub col: u32, } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct LineCol { +pub enum WideEncoding { + Utf16, + Utf32, +} + +/// Line/Column information in legacy encodings. +/// +/// Deliberately not a generic type and different from `LineCol`. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct WideLineCol { /// Zero-based pub line: u32, - /// Zero-based utf8 offset + /// Zero-based pub col: u32, } #[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub(crate) struct Utf16Char { +pub(crate) struct WideChar { /// Start offset of a character inside a line, zero-based pub(crate) start: TextSize, /// End offset of a character inside a line, zero-based pub(crate) end: TextSize, } -impl Utf16Char { +impl WideChar { /// Returns the length in 8-bit UTF-8 code units. 
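Both `callable_for_node` and the new `generic_def_for_node` pick the active argument by counting the `,` tokens that start at or before the cursor, rather than by counting argument nodes. A standalone sketch of that counting rule over a simplified token list (hypothetical `Tok`/`TokKind` types, not the real syntax tree API):

// Simplified stand-in for a syntax token: its kind and start offset in the file.
#[derive(Clone, Copy, PartialEq)]
enum TokKind { Comma, Other }

struct Tok { kind: TokKind, start: usize }

/// Index of the argument the cursor sits in: the number of commas whose start
/// offset is not past the cursor.
fn active_param(arg_list_tokens: &[Tok], cursor: usize) -> usize {
    arg_list_tokens
        .iter()
        .filter(|t| t.kind == TokKind::Comma)
        .take_while(|t| t.start <= cursor)
        .count()
}

fn main() {
    // f(a, b, c) with the cursor right after the second comma -> third argument (index 2).
    let toks = [
        Tok { kind: TokKind::Other, start: 2 }, // a
        Tok { kind: TokKind::Comma, start: 3 }, // ,
        Tok { kind: TokKind::Other, start: 5 }, // b
        Tok { kind: TokKind::Comma, start: 6 }, // ,
        Tok { kind: TokKind::Other, start: 8 }, // c
    ];
    assert_eq!(active_param(&toks, 7), 2);
}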
fn len(&self) -> TextSize { self.end - self.start } - /// Returns the length in 16-bit UTF-16 code units. - fn len_utf16(&self) -> usize { - if self.len() == TextSize::from(4) { - 2 - } else { - 1 + /// Returns the length in UTF-16 or UTF-32 code units. + fn wide_len(&self, enc: WideEncoding) -> usize { + match enc { + WideEncoding::Utf16 => { + if self.len() == TextSize::from(4) { + 2 + } else { + 1 + } + } + + WideEncoding::Utf32 => 1, } } } impl LineIndex { pub fn new(text: &str) -> LineIndex { - let mut utf16_lines = NoHashHashMap::default(); - let mut utf16_chars = Vec::new(); + let mut line_wide_chars = NoHashHashMap::default(); + let mut wide_chars = Vec::new(); let mut newlines = Vec::with_capacity(16); newlines.push(TextSize::from(0)); @@ -71,8 +87,8 @@ impl LineIndex { newlines.push(curr_row); // Save any utf-16 characters seen in the previous line - if !utf16_chars.is_empty() { - utf16_lines.insert(line, mem::take(&mut utf16_chars)); + if !wide_chars.is_empty() { + line_wide_chars.insert(line, mem::take(&mut wide_chars)); } // Prepare for processing the next line @@ -82,18 +98,18 @@ impl LineIndex { } if !c.is_ascii() { - utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len }); + wide_chars.push(WideChar { start: curr_col, end: curr_col + c_len }); } curr_col += c_len; } // Save any utf-16 characters seen in the last line - if !utf16_chars.is_empty() { - utf16_lines.insert(line, utf16_chars); + if !wide_chars.is_empty() { + line_wide_chars.insert(line, wide_chars); } - LineIndex { newlines, utf16_lines } + LineIndex { newlines, line_wide_chars } } pub fn line_col(&self, offset: TextSize) -> LineCol { @@ -109,13 +125,13 @@ impl LineIndex { .map(|offset| offset + TextSize::from(line_col.col)) } - pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 { - let col = self.utf8_to_utf16_col(line_col.line, line_col.col.into()); - LineColUtf16 { line: line_col.line, col: col as u32 } + pub fn to_wide(&self, enc: WideEncoding, line_col: LineCol) -> WideLineCol { + let col = self.utf8_to_wide_col(enc, line_col.line, line_col.col.into()); + WideLineCol { line: line_col.line, col: col as u32 } } - pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol { - let col = self.utf16_to_utf8_col(line_col.line, line_col.col); + pub fn to_utf8(&self, enc: WideEncoding, line_col: WideLineCol) -> LineCol { + let col = self.wide_to_utf8_col(enc, line_col.line, line_col.col); LineCol { line: line_col.line, col: col.into() } } @@ -132,12 +148,12 @@ impl LineIndex { .filter(|it| !it.is_empty()) } - fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize { + fn utf8_to_wide_col(&self, enc: WideEncoding, line: u32, col: TextSize) -> usize { let mut res: usize = col.into(); - if let Some(utf16_chars) = self.utf16_lines.get(&line) { - for c in utf16_chars { + if let Some(wide_chars) = self.line_wide_chars.get(&line) { + for c in wide_chars { if c.end <= col { - res -= usize::from(c.len()) - c.len_utf16(); + res -= usize::from(c.len()) - c.wide_len(enc); } else { // From here on, all utf16 characters come *after* the character we are mapping, // so we don't need to take them into account @@ -148,11 +164,11 @@ impl LineIndex { res } - fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize { - if let Some(utf16_chars) = self.utf16_lines.get(&line) { - for c in utf16_chars { + fn wide_to_utf8_col(&self, enc: WideEncoding, line: u32, mut col: u32) -> TextSize { + if let Some(wide_chars) = self.line_wide_chars.get(&line) { + for c in wide_chars { if col > u32::from(c.start) { - 
col += u32::from(c.len()) - c.len_utf16() as u32; + col += u32::from(c.len()) - c.wide_len(enc) as u32; } else { // From here on, all utf16 characters come *after* the character we are mapping, // so we don't need to take them into account @@ -167,6 +183,9 @@ impl LineIndex { #[cfg(test)] mod tests { + use test_utils::skip_slow_tests; + + use super::WideEncoding::{Utf16, Utf32}; use super::*; #[test] @@ -210,67 +229,59 @@ mod tests { const C: char = 'x'; ", ); - assert_eq!(col_index.utf16_lines.len(), 0); + assert_eq!(col_index.line_wide_chars.len(), 0); } #[test] - fn test_single_char() { - let col_index = LineIndex::new( - " -const C: char = 'メ'; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 1); - assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); - - // UTF-8 to UTF-16, no changes - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); - - // UTF-16 to UTF-8, no changes - assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); - - // UTF-16 to UTF-8 - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); - - let col_index = LineIndex::new("a𐐏b"); - assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5)); - } - - #[test] - fn test_string() { - let col_index = LineIndex::new( - " -const C: char = \"メ メ\"; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 2); - assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); - assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() }); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); - assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); - - assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); - - // UTF-16 to UTF-8 - assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); + fn test_every_chars() { + if skip_slow_tests() { + return; + } - // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1 - assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20 - assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24 + let text: String = { + let mut chars: Vec = ((0 as char)..char::MAX).collect(); // Neat! + chars.extend("\n".repeat(chars.len() / 16).chars()); + let mut rng = oorandom::Rand32::new(stdx::rand::seed()); + stdx::rand::shuffle(&mut chars, |i| rng.rand_range(0..i as u32) as usize); + chars.into_iter().collect() + }; + assert!(text.contains('💩')); // Sanity check. 
+ + let line_index = LineIndex::new(&text); + + let mut lin_col = LineCol { line: 0, col: 0 }; + let mut col_utf16 = 0; + let mut col_utf32 = 0; + for (offset, c) in text.char_indices() { + let got_offset = line_index.offset(lin_col).unwrap(); + assert_eq!(usize::from(got_offset), offset); + + let got_lin_col = line_index.line_col(got_offset); + assert_eq!(got_lin_col, lin_col); + + for enc in [Utf16, Utf32] { + let wide_lin_col = line_index.to_wide(enc, lin_col); + let got_lin_col = line_index.to_utf8(enc, wide_lin_col); + assert_eq!(got_lin_col, lin_col); + + let want_col = match enc { + Utf16 => col_utf16, + Utf32 => col_utf32, + }; + assert_eq!(wide_lin_col.col, want_col) + } - assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15)); + if c == '\n' { + lin_col.line += 1; + lin_col.col = 0; + col_utf16 = 0; + col_utf32 = 0; + } else { + lin_col.col += c.len_utf8() as u32; + col_utf16 += c.len_utf16() as u32; + col_utf32 += 1; + } + } } #[test] diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index ada2821d6b14e..c18a27f17d22c 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -455,15 +455,21 @@ impl<'a> FindUsages<'a> { } let find_nodes = move |name: &str, node: &syntax::SyntaxNode, offset: TextSize| { - node.token_at_offset(offset).find(|it| it.text() == name).map(|token| { - // FIXME: There should be optimization potential here - // Currently we try to descend everything we find which - // means we call `Semantics::descend_into_macros` on - // every textual hit. That function is notoriously - // expensive even for things that do not get down mapped - // into macros. - sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent()) - }) + node.token_at_offset(offset) + .find(|it| { + // `name` is stripped of raw ident prefix. See the comment on name retrieval above. + it.text().trim_start_matches("r#") == name + }) + .into_iter() + .flat_map(|token| { + // FIXME: There should be optimization potential here + // Currently we try to descend everything we find which + // means we call `Semantics::descend_into_macros` on + // every textual hit. That function is notoriously + // expensive even for things that do not get down mapped + // into macros. 
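The `LineIndex` rework folds the old UTF-16-only column mapping into a single `WideEncoding`-parameterized path: for every non-ASCII character that ends before the requested column, subtract the difference between its UTF-8 length and its width in the target encoding. A standalone illustration of that adjustment, computed directly from a line of text rather than from the precollected `WideChar` ranges the real code uses:

/// Convert a UTF-8 byte column within `line` to a column in UTF-16 or UTF-32
/// code units. Mirrors the per-character adjustment LineIndex performs, but
/// recomputed from the text instead of cached ranges.
fn utf8_to_wide_col(line: &str, utf8_col: usize, utf16: bool) -> usize {
    let mut res = utf8_col;
    for (offset, c) in line.char_indices() {
        if offset + c.len_utf8() > utf8_col {
            break; // characters at or after the column don't affect it
        }
        let wide_len = if utf16 { c.len_utf16() } else { 1 };
        res -= c.len_utf8() - wide_len;
    }
    res
}

fn main() {
    // "aメb": 'メ' is 3 UTF-8 bytes but 1 UTF-16 unit, so the byte column of 'b' (4)
    // maps to column 2 in both wide encodings.
    assert_eq!(utf8_to_wide_col("aメb", 4, true), 2);
    assert_eq!(utf8_to_wide_col("aメb", 4, false), 2);
    // "a𐐏b": '𐐏' is 4 UTF-8 bytes, 2 UTF-16 units, 1 UTF-32 scalar.
    assert_eq!(utf8_to_wide_col("a𐐏b", 5, true), 3);
    assert_eq!(utf8_to_wide_col("a𐐏b", 5, false), 2);
}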
+ sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent()) + }) }; for (text, file_id, search_range) in scope_files(sema, &search_scope) { @@ -471,30 +477,23 @@ impl<'a> FindUsages<'a> { // Search for occurrences of the items name for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes(name, &tree, offset) { - for name in iter.filter_map(ast::NameLike::cast) { - if match name { - ast::NameLike::NameRef(name_ref) => { - self.found_name_ref(&name_ref, sink) - } - ast::NameLike::Name(name) => self.found_name(&name, sink), - ast::NameLike::Lifetime(lifetime) => { - self.found_lifetime(&lifetime, sink) - } - } { - return; - } + for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) { + if match name { + ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), + ast::NameLike::Name(name) => self.found_name(&name, sink), + ast::NameLike::Lifetime(lifetime) => self.found_lifetime(&lifetime, sink), + } { + return; } } } // Search for occurrences of the `Self` referring to our type if let Some((self_ty, finder)) = &include_self_kw_refs { for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("Self", &tree, offset) { - for name_ref in iter.filter_map(ast::NameRef::cast) { - if self.found_self_ty_name_ref(self_ty, &name_ref, sink) { - return; - } + for name_ref in find_nodes("Self", &tree, offset).filter_map(ast::NameRef::cast) + { + if self.found_self_ty_name_ref(self_ty, &name_ref, sink) { + return; } } } @@ -513,21 +512,21 @@ impl<'a> FindUsages<'a> { let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("super", &tree, offset) { - for name_ref in iter.filter_map(ast::NameRef::cast) { - if self.found_name_ref(&name_ref, sink) { - return; - } + for name_ref in + find_nodes("super", &tree, offset).filter_map(ast::NameRef::cast) + { + if self.found_name_ref(&name_ref, sink) { + return; } } } if let Some(finder) = &is_crate_root { for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("crate", &tree, offset) { - for name_ref in iter.filter_map(ast::NameRef::cast) { - if self.found_name_ref(&name_ref, sink) { - return; - } + for name_ref in + find_nodes("crate", &tree, offset).filter_map(ast::NameRef::cast) + { + if self.found_name_ref(&name_ref, sink) { + return; } } } @@ -566,11 +565,10 @@ impl<'a> FindUsages<'a> { let finder = &Finder::new("self"); for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("self", &tree, offset) { - for name_ref in iter.filter_map(ast::NameRef::cast) { - if self.found_self_module_name_ref(&name_ref, sink) { - return; - } + for name_ref in find_nodes("self", &tree, offset).filter_map(ast::NameRef::cast) + { + if self.found_self_module_name_ref(&name_ref, sink) { + return; } } } diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 414c08ff7e037..30e514e4136a4 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -22,7 +22,7 @@ pulldown-cmark-to-cmark = "10.0.4" pulldown-cmark = { version = "0.9.1", default-features = false } url = "2.3.1" dot = "0.1.4" -smallvec = "1.10.0" +smallvec.workspace = true # local deps cfg.workspace = true diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 2058a4f5f190a..5f2c61f5b5f80 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -201,6 +201,23 @@ fn hover_simple( 
Some(render::struct_rest_pat(sema, config, &record_pat)) }) + }) + // try () call hovers + .or_else(|| { + descended().find_map(|token| { + if token.kind() != T!['('] && token.kind() != T![')'] { + return None; + } + let arg_list = token.parent().and_then(ast::ArgList::cast)?.syntax().parent()?; + let call_expr = syntax::match_ast! { + match arg_list { + ast::CallExpr(expr) => expr.into(), + ast::MethodCallExpr(expr) => expr.into(), + _ => return None, + } + }; + render::type_info_of(sema, config, &Either::Left(call_expr)) + }) }); result.map(|mut res: HoverResult| { diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 2830212add8eb..bd7ce2f1d0d07 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -5612,3 +5612,38 @@ fn main() { "#, ); } + +#[test] +fn hover_call_parens() { + check( + r#" +fn foo() -> i32 {} +fn main() { + foo($0); +} +"#, + expect![[r#" + *)* + ```rust + i32 + ``` + "#]], + ); + check( + r#" +struct S; +impl S { + fn foo(self) -> i32 {} +} +fn main() { + S.foo($0); +} +"#, + expect![[r#" + *)* + ```rust + i32 + ``` + "#]], + ); +} diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs index 11b9cd269bfa8..5d9729263c270 100644 --- a/crates/ide/src/inlay_hints/binding_mode.rs +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -29,8 +29,17 @@ pub(super) fn hints( _ => None, }) .last(); - let range = - outer_paren_pat.as_ref().map_or_else(|| pat.syntax(), |it| it.syntax()).text_range(); + let range = outer_paren_pat.as_ref().map_or_else( + || match pat { + // for ident patterns that @ bind a name, render the un-ref patterns in front of the inner pattern + // instead of the name as that makes it more clear and doesn't really change the outcome + ast::Pat::IdentPat(it) => { + it.pat().map_or_else(|| it.syntax().text_range(), |it| it.syntax().text_range()) + } + it => it.syntax().text_range(), + }, + |it| it.syntax().text_range(), + ); let pattern_adjustments = sema.pattern_adjustments(pat); pattern_adjustments.iter().for_each(|ty| { let reference = ty.is_reference(); @@ -123,4 +132,20 @@ fn __( }"#, ); } + + #[test] + fn hints_binding_modes_complex_ident_pat() { + check_with_config( + InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG }, + r#" +struct Struct { + field: &'static str, +} +fn foo(s @ Struct { field, .. 
}: &Struct) {} + //^^^^^^^^^^^^^^^^^^^^^^^^ref + //^^^^^^^^^^^^^^^^^^^^& + //^^^^^ref +"#, + ); + } } diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 4ead9d4d0a869..f2b535bdc7efe 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -115,7 +115,7 @@ pub use ide_db::{ SourceRoot, SourceRootId, }, label::Label, - line_index::{LineCol, LineColUtf16, LineIndex}, + line_index::{LineCol, LineIndex}, search::{ReferenceCategory, SearchScope}, source_change::{FileSystemEdit, SourceChange}, symbol_index::Query, diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index 60fb1544a8fe5..cabbc287279f6 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -2016,4 +2016,19 @@ fn method$0() {} "#]], ); } + + #[test] + fn raw_identifier() { + check( + r#" +fn r#fn$0() {} +fn main() { r#fn(); } +"#, + expect![[r#" + r#fn Function FileId(0) 0..12 3..7 + + FileId(0) 25..29 + "#]], + ); + } } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index 8e89160ef5e05..c0237e1edd0d3 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -1371,7 +1371,6 @@ pub fn baz() {} #[test] fn test_rename_mod_from_raw_ident() { - // FIXME: `r#fn` in path expression is not renamed. check_expect( "foo", r#" @@ -1397,6 +1396,10 @@ pub fn baz() {} insert: "foo", delete: 4..8, }, + Indel { + insert: "foo", + delete: 23..27, + }, ], }, }, diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index ae539a5d397f7..e606072a82375 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -18,7 +18,9 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { let crate_graph = db.crate_graph(); let mut shuffled_ids = crate_graph.iter().collect::>(); - shuffle(&mut shuffled_ids); + + let mut rng = oorandom::Rand32::new(stdx::rand::seed()); + stdx::rand::shuffle(&mut shuffled_ids, |i| rng.rand_range(0..i as u32) as usize); let mut new_graph = CrateGraph::default(); @@ -52,21 +54,3 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH); } - -fn shuffle(slice: &mut [T]) { - let mut rng = oorandom::Rand32::new(seed()); - - let mut remaining = slice.len() - 1; - while remaining > 0 { - let index = rng.rand_range(0..remaining as u32); - slice.swap(remaining, index as usize); - remaining -= 1; - } -} - -fn seed() -> u64 { - use std::collections::hash_map::RandomState; - use std::hash::{BuildHasher, Hasher}; - - RandomState::new().build_hasher().finish() -} diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index a666562f1010c..f70ca55a508d2 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -7,12 +7,16 @@ use either::Either; use hir::{ AssocItem, GenericParam, HasAttrs, HirDisplay, ModuleDef, PathResolution, Semantics, Trait, }; -use ide_db::{active_parameter::callable_for_node, base_db::FilePosition, FxIndexMap}; +use ide_db::{ + active_parameter::{callable_for_node, generic_def_for_node}, + base_db::FilePosition, + FxIndexMap, +}; use stdx::format_to; use syntax::{ algo, ast::{self, HasArgList}, - match_ast, AstNode, Direction, SyntaxKind, SyntaxToken, TextRange, TextSize, + match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize, }; use crate::RootDatabase; @@ -105,10 +109,10 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio // Stop at multi-line expressions, since the 
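The new `raw_identifier` reference and rename tests are enabled by the `find_nodes` change above: the searched-for `name` is stored without the `r#` prefix, so candidate tokens are compared with the prefix stripped as well. Standalone, the comparison is just:

/// `target` is the definition's name with any `r#` prefix already stripped,
/// matching how the search code stores the name it looks for.
fn same_ident(token_text: &str, target: &str) -> bool {
    token_text.trim_start_matches("r#") == target
}

fn main() {
    assert!(same_ident("r#fn", "fn"));
    assert!(same_ident("fn", "fn"));
    assert!(!same_ident("r#foo", "fn"));
}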
signature of the outer call is not very // helpful inside them. if let Some(expr) = ast::Expr::cast(node.clone()) { - if expr.syntax().text().contains_char('\n') - && expr.syntax().kind() != SyntaxKind::RECORD_EXPR + if !matches!(expr, ast::Expr::RecordExpr(..)) + && expr.syntax().text().contains_char('\n') { - return None; + break; } } } @@ -122,18 +126,16 @@ fn signature_help_for_call( token: SyntaxToken, ) -> Option { // Find the calling expression and its NameRef - let mut node = arg_list.syntax().parent()?; + let mut nodes = arg_list.syntax().ancestors().skip(1); let calling_node = loop { - if let Some(callable) = ast::CallableExpr::cast(node.clone()) { - if callable + if let Some(callable) = ast::CallableExpr::cast(nodes.next()?) { + let inside_callable = callable .arg_list() - .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start())) - { + .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start())); + if inside_callable { break callable; } } - - node = node.parent()?; }; let (callable, active_parameter) = callable_for_node(sema, &calling_node, &token)?; @@ -216,59 +218,11 @@ fn signature_help_for_call( fn signature_help_for_generics( sema: &Semantics<'_, RootDatabase>, - garg_list: ast::GenericArgList, + arg_list: ast::GenericArgList, token: SyntaxToken, ) -> Option { - let arg_list = garg_list - .syntax() - .ancestors() - .filter_map(ast::GenericArgList::cast) - .find(|list| list.syntax().text_range().contains(token.text_range().start()))?; - - let mut active_parameter = arg_list - .generic_args() - .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start()) - .count(); - - let first_arg_is_non_lifetime = arg_list - .generic_args() - .next() - .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_))); - - let mut generics_def = if let Some(path) = - arg_list.syntax().ancestors().find_map(ast::Path::cast) - { - let res = sema.resolve_path(&path)?; - let generic_def: hir::GenericDef = match res { - hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)) - | hir::PathResolution::Def(hir::ModuleDef::Const(_)) - | hir::PathResolution::Def(hir::ModuleDef::Macro(_)) - | hir::PathResolution::Def(hir::ModuleDef::Module(_)) - | hir::PathResolution::Def(hir::ModuleDef::Static(_)) => return None, - hir::PathResolution::BuiltinAttr(_) - | hir::PathResolution::ToolModule(_) - | hir::PathResolution::Local(_) - | hir::PathResolution::TypeParam(_) - | hir::PathResolution::ConstParam(_) - | hir::PathResolution::SelfType(_) - | hir::PathResolution::DeriveHelper(_) => return None, - }; - - generic_def - } else if let Some(method_call) = arg_list.syntax().parent().and_then(ast::MethodCallExpr::cast) - { - // recv.method::<$0>() - let method = sema.resolve_method_call(&method_call)?; - method.into() - } else { - return None; - }; - + let (mut generics_def, mut active_parameter, first_arg_is_non_lifetime) = + generic_def_for_node(sema, &arg_list, &token)?; let mut res = SignatureHelp { doc: None, signature: String::new(), @@ -307,9 +261,9 @@ fn signature_help_for_generics( // eg. `None::` // We'll use the signature of the enum, but include the docs of the variant. 
res.doc = it.docs(db).map(|it| it.into()); - let it = it.parent_enum(db); - format_to!(res.signature, "enum {}", it.name(db)); - generics_def = it.into(); + let enum_ = it.parent_enum(db); + format_to!(res.signature, "enum {}", enum_.name(db)); + generics_def = enum_.into(); } // These don't have generic args that can be specified hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None, @@ -388,16 +342,13 @@ fn signature_help_for_record_lit( record: ast::RecordExpr, token: SyntaxToken, ) -> Option { - let arg_list = record - .syntax() - .ancestors() - .filter_map(ast::RecordExpr::cast) - .find(|list| list.syntax().text_range().contains(token.text_range().start()))?; - - let active_parameter = arg_list + let active_parameter = record .record_expr_field_list()? - .fields() - .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start()) + .syntax() + .children_with_tokens() + .filter_map(syntax::NodeOrToken::into_token) + .filter(|t| t.kind() == syntax::T![,]) + .take_while(|t| t.text_range().start() <= token.text_range().start()) .count(); let mut res = SignatureHelp { @@ -1594,4 +1545,27 @@ impl S { "#]], ); } + + #[test] + fn test_enum_in_nested_method_in_lambda() { + check( + r#" +enum A { + A, + B +} + +fn bar(_: A) { } + +fn main() { + let foo = Foo; + std::thread::spawn(move || { bar(A:$0) } ); +} +"#, + expect![[r#" + fn bar(_: A) + ^^^^ + "#]], + ); + } } diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 2f870d769c0fb..fc9b5d3ba4cd7 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -1126,5 +1126,5 @@ fn benchmark_syntax_highlighting_parser() { .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) .count() }; - assert_eq!(hash, 1609); + assert_eq!(hash, 1608); } diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index 280ffc219bac5..82105522ebdd9 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" rustc-hash = "1.1.0" -smallvec = "1.10.0" +smallvec.workspace = true tracing = "0.1.35" # local deps diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml index 08359133f1aa7..6e962abd75477 100644 --- a/crates/parser/Cargo.toml +++ b/crates/parser/Cargo.toml @@ -20,4 +20,5 @@ limit.workspace = true [dev-dependencies] expect-test = "1.4.0" +stdx.workspace = true sourcegen.workspace = true diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs index 485b612f08187..15ec9e167e025 100644 --- a/crates/parser/src/grammar.rs +++ b/crates/parser/src/grammar.rs @@ -200,6 +200,8 @@ impl BlockLike { } } +const VISIBILITY_FIRST: TokenSet = TokenSet::new(&[T![pub], T![crate]]); + fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool { match p.current() { T![pub] => { @@ -340,3 +342,31 @@ fn error_block(p: &mut Parser<'_>, message: &str) { p.eat(T!['}']); m.complete(p, ERROR); } + +/// The `parser` passed this is required to at least consume one token if it returns `true`. +/// If the `parser` returns false, parsing will stop. 
+fn delimited( + p: &mut Parser<'_>, + bra: SyntaxKind, + ket: SyntaxKind, + delim: SyntaxKind, + first_set: TokenSet, + mut parser: impl FnMut(&mut Parser<'_>) -> bool, +) { + p.bump(bra); + while !p.at(ket) && !p.at(EOF) { + if !parser(p) { + break; + } + if !p.at(delim) { + if p.at_ts(first_set) { + p.error(format!("expected {:?}", delim)); + } else { + break; + } + } else { + p.bump(delim); + } + } + p.expect(ket); +} diff --git a/crates/parser/src/grammar/attributes.rs b/crates/parser/src/grammar/attributes.rs index 0cf6a16f86a57..4ecaa6e6a85e9 100644 --- a/crates/parser/src/grammar/attributes.rs +++ b/crates/parser/src/grammar/attributes.rs @@ -1,5 +1,7 @@ use super::*; +pub(super) const ATTRIBUTE_FIRST: TokenSet = TokenSet::new(&[T![#]]); + pub(super) fn inner_attrs(p: &mut Parser<'_>) { while p.at(T![#]) && p.nth(1) == T![!] { attr(p, true); diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index 7516ac3c4bd37..4b080102a2c3d 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -1,5 +1,7 @@ mod atom; +use crate::grammar::attributes::ATTRIBUTE_FIRST; + use super::*; pub(crate) use self::atom::{block_expr, match_arm_list}; @@ -68,6 +70,12 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) { Err(m) => m, }; + if !p.at_ts(EXPR_FIRST) { + p.err_and_bump("expected expression, item or let statement"); + m.abandon(p); + return; + } + if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) { if !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) { // test no_semi_after_block @@ -227,6 +235,12 @@ fn expr_bp( attributes::outer_attrs(p); m }); + + if !p.at_ts(EXPR_FIRST) { + p.err_recover("expected expression", atom::EXPR_RECOVERY_SET); + m.abandon(p); + return None; + } let mut lhs = match lhs(p, r) { Some((lhs, blocklike)) => { let lhs = lhs.extend_to(p, m); @@ -551,23 +565,20 @@ fn cast_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { m.complete(p, CAST_EXPR) } +// test_err arg_list_recovery +// fn main() { +// foo(bar::); +// foo(bar:); +// foo(bar+); +// } fn arg_list(p: &mut Parser<'_>) { assert!(p.at(T!['('])); let m = p.start(); - p.bump(T!['(']); - while !p.at(T![')']) && !p.at(EOF) { - // test arg_with_attr - // fn main() { - // foo(#[attr] 92) - // } - if !expr(p) { - break; - } - if !p.at(T![')']) && !p.expect(T![,]) { - break; - } - } - p.eat(T![')']); + // test arg_with_attr + // fn main() { + // foo(#[attr] 92) + // } + delimited(p, T!['('], T![')'], T![,], EXPR_FIRST.union(ATTRIBUTE_FIRST), expr); m.complete(p, ARG_LIST); } diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index a23f900b73864..efc2603835e8f 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -40,26 +40,28 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T!['{'], T!['['], T![|], - T![move], + T![async], T![box], + T![break], + T![const], + T![continue], + T![do], + T![for], T![if], - T![while], + T![let], + T![loop], T![match], - T![unsafe], + T![move], T![return], - T![yield], - T![do], - T![break], - T![continue], - T![async], + T![static], T![try], - T![const], - T![loop], - T![for], + T![unsafe], + T![while], + T![yield], LIFETIME_IDENT, ])); -const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let]]); +pub(super) const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![')'], T![']']]); pub(super) fn atom_expr( p: &mut Parser<'_>, @@ -116,7 +118,7 @@ 
pub(super) fn atom_expr( // fn main() { // 'loop: impl // } - p.error("expected a loop"); + p.error("expected a loop or block"); m.complete(p, ERROR); return None; } @@ -157,7 +159,7 @@ pub(super) fn atom_expr( T![for] => for_expr(p, None), _ => { - p.err_recover("expected expression", EXPR_RECOVERY_SET); + p.err_and_bump("expected expression"); return None; } }; diff --git a/crates/parser/src/grammar/generic_args.rs b/crates/parser/src/grammar/generic_args.rs index c438943a00262..919d9b91ebab5 100644 --- a/crates/parser/src/grammar/generic_args.rs +++ b/crates/parser/src/grammar/generic_args.rs @@ -5,27 +5,35 @@ pub(super) fn opt_generic_arg_list(p: &mut Parser<'_>, colon_colon_required: boo if p.at(T![::]) && p.nth(2) == T![<] { m = p.start(); p.bump(T![::]); - p.bump(T![<]); } else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] { m = p.start(); - p.bump(T![<]); } else { return; } - while !p.at(EOF) && !p.at(T![>]) { - generic_arg(p); - if !p.at(T![>]) && !p.expect(T![,]) { - break; - } - } - p.expect(T![>]); + delimited(p, T![<], T![>], T![,], GENERIC_ARG_FIRST, generic_arg); m.complete(p, GENERIC_ARG_LIST); } +const GENERIC_ARG_FIRST: TokenSet = TokenSet::new(&[ + LIFETIME_IDENT, + IDENT, + T!['{'], + T![true], + T![false], + T![-], + INT_NUMBER, + FLOAT_NUMBER, + CHAR, + BYTE, + STRING, + BYTE_STRING, +]) +.union(types::TYPE_FIRST); + // test generic_arg // type T = S; -fn generic_arg(p: &mut Parser<'_>) { +fn generic_arg(p: &mut Parser<'_>) -> bool { match p.current() { LIFETIME_IDENT => lifetime_arg(p), T!['{'] | T![true] | T![false] | T![-] => const_arg(p), @@ -68,8 +76,10 @@ fn generic_arg(p: &mut Parser<'_>) { } } } - _ => type_arg(p), + _ if p.at_ts(types::TYPE_FIRST) => type_arg(p), + _ => return false, } + true } // test lifetime_arg diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs index 6db28ef13239c..7fcf938babdb1 100644 --- a/crates/parser/src/grammar/generic_params.rs +++ b/crates/parser/src/grammar/generic_params.rs @@ -1,3 +1,5 @@ +use crate::grammar::attributes::ATTRIBUTE_FIRST; + use super::*; pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) { @@ -11,32 +13,31 @@ pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) { fn generic_param_list(p: &mut Parser<'_>) { assert!(p.at(T![<])); let m = p.start(); - p.bump(T![<]); + delimited(p, T![<], T![>], T![,], GENERIC_PARAM_FIRST.union(ATTRIBUTE_FIRST), |p| { + // test generic_param_attribute + // fn foo<#[lt_attr] 'a, #[t_attr] T>() {} + let m = p.start(); + attributes::outer_attrs(p); + generic_param(p, m) + }); - while !p.at(EOF) && !p.at(T![>]) { - generic_param(p); - if !p.at(T![>]) && !p.expect(T![,]) { - break; - } - } - p.expect(T![>]); m.complete(p, GENERIC_PARAM_LIST); } -fn generic_param(p: &mut Parser<'_>) { - let m = p.start(); - // test generic_param_attribute - // fn foo<#[lt_attr] 'a, #[t_attr] T>() {} - attributes::outer_attrs(p); +const GENERIC_PARAM_FIRST: TokenSet = TokenSet::new(&[IDENT, LIFETIME_IDENT, T![const]]); + +fn generic_param(p: &mut Parser<'_>, m: Marker) -> bool { match p.current() { LIFETIME_IDENT => lifetime_param(p, m), IDENT => type_param(p, m), T![const] => const_param(p, m), _ => { m.abandon(p); - p.err_and_bump("expected type parameter"); + p.err_and_bump("expected generic parameter"); + return false; } } + true } // test lifetime_param diff --git a/crates/parser/src/grammar/items/adt.rs b/crates/parser/src/grammar/items/adt.rs index e7d30516b9510..17f41b8e13a40 100644 --- 
a/crates/parser/src/grammar/items/adt.rs +++ b/crates/parser/src/grammar/items/adt.rs @@ -1,3 +1,5 @@ +use crate::grammar::attributes::ATTRIBUTE_FIRST; + use super::*; // test struct_item @@ -141,28 +143,31 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) { } } +const TUPLE_FIELD_FIRST: TokenSet = + types::TYPE_FIRST.union(ATTRIBUTE_FIRST).union(VISIBILITY_FIRST); + fn tuple_field_list(p: &mut Parser<'_>) { assert!(p.at(T!['('])); let m = p.start(); - p.bump(T!['(']); - while !p.at(T![')']) && !p.at(EOF) { + delimited(p, T!['('], T![')'], T![,], TUPLE_FIELD_FIRST, |p| { let m = p.start(); // test tuple_field_attrs // struct S (#[attr] f32); attributes::outer_attrs(p); - opt_visibility(p, true); + let has_vis = opt_visibility(p, true); if !p.at_ts(types::TYPE_FIRST) { p.error("expected a type"); - m.complete(p, ERROR); - break; + if has_vis { + m.complete(p, ERROR); + } else { + m.abandon(p); + } + return false; } types::type_(p); m.complete(p, TUPLE_FIELD); + true + }); - if !p.at(T![')']) { - p.expect(T![,]); - } - } - p.expect(T![')']); m.complete(p, TUPLE_FIELD_LIST); } diff --git a/crates/parser/src/grammar/params.rs b/crates/parser/src/grammar/params.rs index 20e8e95f0662c..74eae9151a265 100644 --- a/crates/parser/src/grammar/params.rs +++ b/crates/parser/src/grammar/params.rs @@ -1,3 +1,5 @@ +use crate::grammar::attributes::ATTRIBUTE_FIRST; + use super::*; // test param_list @@ -66,14 +68,20 @@ fn list_(p: &mut Parser<'_>, flavor: Flavor) { } }; - if !p.at_ts(PARAM_FIRST) { + if !p.at_ts(PARAM_FIRST.union(ATTRIBUTE_FIRST)) { p.error("expected value parameter"); m.abandon(p); break; } param(p, m, flavor); - if !p.at(ket) { - p.expect(T![,]); + if !p.at(T![,]) { + if p.at_ts(PARAM_FIRST.union(ATTRIBUTE_FIRST)) { + p.error("expected `,`"); + } else { + break; + } + } else { + p.bump(T![,]); } } diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs index af3b6f63cf51c..1064ae9970c99 100644 --- a/crates/parser/src/grammar/paths.rs +++ b/crates/parser/src/grammar/paths.rs @@ -67,6 +67,10 @@ fn path_for_qualifier( } } +const EXPR_PATH_SEGMENT_RECOVERY_SET: TokenSet = + items::ITEM_RECOVERY_SET.union(TokenSet::new(&[T![')'], T![,], T![let]])); +const TYPE_PATH_SEGMENT_RECOVERY_SET: TokenSet = types::TYPE_RECOVERY_SET; + fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) { let m = p.start(); // test qual_paths @@ -102,7 +106,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) { m.complete(p, NAME_REF); } _ => { - p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); + let recover_set = match mode { + Mode::Use => items::ITEM_RECOVERY_SET, + Mode::Type => TYPE_PATH_SEGMENT_RECOVERY_SET, + Mode::Expr => EXPR_PATH_SEGMENT_RECOVERY_SET, + }; + p.err_recover("expected identifier", recover_set); if empty { // test_err empty_segment // use crate::; diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs index 5c6e18fee8bff..7d0b156c5a06a 100644 --- a/crates/parser/src/grammar/types.rs +++ b/crates/parser/src/grammar/types.rs @@ -17,8 +17,9 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[ T![Self], ])); -const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[ +pub(super) const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[ T![')'], + T![>], T![,], // test_err struct_field_recover // struct S { f pub g: () } diff --git a/crates/parser/src/tests.rs b/crates/parser/src/tests.rs index c1b4e9a7d8aec..2fec765bd7871 100644 --- a/crates/parser/src/tests.rs +++ 
b/crates/parser/src/tests.rs @@ -15,6 +15,7 @@ use crate::{LexedStr, TopEntryPoint}; #[test] fn lex_ok() { for case in TestCase::list("lexer/ok") { + let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); let actual = lex(&case.text); expect_file![case.rast].assert_eq(&actual) } @@ -23,6 +24,7 @@ fn lex_ok() { #[test] fn lex_err() { for case in TestCase::list("lexer/err") { + let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); let actual = lex(&case.text); expect_file![case.rast].assert_eq(&actual) } @@ -46,6 +48,7 @@ fn lex(text: &str) -> String { #[test] fn parse_ok() { for case in TestCase::list("parser/ok") { + let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual); @@ -55,6 +58,7 @@ fn parse_ok() { #[test] fn parse_inline_ok() { for case in TestCase::list("parser/inline/ok") { + let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual); @@ -64,6 +68,7 @@ fn parse_inline_ok() { #[test] fn parse_err() { for case in TestCase::list("parser/err") { + let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual) @@ -73,6 +78,7 @@ fn parse_err() { #[test] fn parse_inline_err() { for case in TestCase::list("parser/inline/err") { + let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual) diff --git a/crates/parser/src/tests/top_entries.rs b/crates/parser/src/tests/top_entries.rs index eb640dc7fc74b..49dd9e293b8fe 100644 --- a/crates/parser/src/tests/top_entries.rs +++ b/crates/parser/src/tests/top_entries.rs @@ -65,7 +65,7 @@ fn macro_stmt() { MACRO_STMTS ERROR SHEBANG "#!/usr/bin/rust" - error 0: expected expression + error 0: expected expression, item or let statement "##]], ); check( diff --git a/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast b/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast index a0154321718b6..cdc01863ab04c 100644 --- a/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast +++ b/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast @@ -44,8 +44,7 @@ SOURCE_FILE IDENT "T" SEMICOLON ";" WHITESPACE "\n" -error 9: expected type parameter -error 11: expected COMMA +error 9: expected generic parameter error 11: expected R_ANGLE error 11: expected `;`, `{`, or `(` error 12: expected an item diff --git a/crates/parser/test_data/parser/err/0013_invalid_type.rast b/crates/parser/test_data/parser/err/0013_invalid_type.rast index eec84a0c67d9e..b485c71ab394f 100644 --- a/crates/parser/test_data/parser/err/0013_invalid_type.rast +++ b/crates/parser/test_data/parser/err/0013_invalid_type.rast @@ -43,17 +43,14 @@ SOURCE_FILE IDENT "Box" GENERIC_ARG_LIST L_ANGLE "<" - TYPE_ARG - ERROR - AT "@" - WHITESPACE " " - TUPLE_FIELD - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Any" - ERROR + 
ERROR + AT "@" + WHITESPACE " " + MACRO_CALL + PATH + PATH_SEGMENT + NAME_REF + IDENT "Any" ERROR R_ANGLE ">" ERROR @@ -69,17 +66,14 @@ SOURCE_FILE ERROR SEMICOLON ";" WHITESPACE "\n\n" -error 67: expected type -error 68: expected COMMA -error 68: expected R_ANGLE -error 68: expected COMMA -error 68: expected R_ANGLE -error 68: expected COMMA -error 68: expected R_ANGLE -error 68: expected COMMA -error 72: expected COMMA -error 72: expected a type -error 72: expected R_PAREN +error 67: expected R_ANGLE +error 67: expected R_ANGLE +error 67: expected R_ANGLE +error 67: expected R_PAREN +error 67: expected SEMICOLON +error 67: expected an item +error 72: expected BANG +error 72: expected `{`, `[`, `(` error 72: expected SEMICOLON error 72: expected an item error 73: expected an item diff --git a/crates/parser/test_data/parser/err/0022_bad_exprs.rast b/crates/parser/test_data/parser/err/0022_bad_exprs.rast index 900394bd96018..d97fc6c72091d 100644 --- a/crates/parser/test_data/parser/err/0022_bad_exprs.rast +++ b/crates/parser/test_data/parser/err/0022_bad_exprs.rast @@ -145,27 +145,29 @@ SOURCE_FILE error 16: expected expression error 17: expected R_BRACK error 17: expected SEMICOLON -error 17: expected expression +error 17: expected expression, item or let statement error 25: expected a name error 26: expected `;`, `{`, or `(` error 30: expected pattern error 31: expected SEMICOLON error 53: expected expression +error 54: expected R_PAREN error 54: expected SEMICOLON -error 54: expected expression +error 54: expected expression, item or let statement error 60: expected type error 60: expected `{` -error 60: expected expression +error 60: expected expression, item or let statement error 65: expected pattern error 65: expected SEMICOLON -error 65: expected expression +error 65: expected expression, item or let statement error 92: expected expression +error 93: expected R_PAREN error 93: expected SEMICOLON -error 93: expected expression -error 95: expected expression -error 96: expected expression +error 93: expected expression, item or let statement +error 95: expected expression, item or let statement +error 96: expected expression, item or let statement error 103: expected a name error 104: expected `{` error 108: expected pattern error 108: expected SEMICOLON -error 108: expected expression +error 108: expected expression, item or let statement diff --git a/crates/parser/test_data/parser/err/0024_many_type_parens.rast b/crates/parser/test_data/parser/err/0024_many_type_parens.rast index d374f86610b28..f0dbc9b1027fa 100644 --- a/crates/parser/test_data/parser/err/0024_many_type_parens.rast +++ b/crates/parser/test_data/parser/err/0024_many_type_parens.rast @@ -168,75 +168,21 @@ SOURCE_FILE L_PAREN "(" ERROR QUESTION "?" 
- EXPR_STMT - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "Sized" + TYPE_ARG + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Sized" ERROR R_PAREN ")" WHITESPACE " " ERROR PLUS "+" WHITESPACE " " - TUPLE_EXPR - L_PAREN "(" - CLOSURE_EXPR - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - BIN_EXPR - BIN_EXPR - BIN_EXPR - BIN_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - L_ANGLE "<" - ERROR - LIFETIME_IDENT "'a" - R_ANGLE ">" - ERROR - R_PAREN ")" - WHITESPACE " " - PLUS "+" - WHITESPACE " " - PAREN_EXPR - L_PAREN "(" - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "Copy" - R_PAREN ")" - R_ANGLE ">" - ERROR - SEMICOLON ";" - WHITESPACE "\n " - LET_EXPR - LET_KW "let" - WHITESPACE " " - WILDCARD_PAT - UNDERSCORE "_" - ERROR - COLON ":" - WHITESPACE " " + EXPR_STMT BIN_EXPR BIN_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "Box" - L_ANGLE "<" TUPLE_EXPR L_PAREN "(" CLOSURE_EXPR @@ -250,78 +196,117 @@ SOURCE_FILE WHITESPACE " " BIN_EXPR BIN_EXPR - BIN_EXPR - BIN_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - L_ANGLE "<" - ERROR - LIFETIME_IDENT "'a" - R_ANGLE ">" - ERROR - R_PAREN ")" - WHITESPACE " " - PLUS "+" - WHITESPACE " " - PAREN_EXPR - L_PAREN "(" - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "Copy" - R_PAREN ")" - WHITESPACE " " - PLUS "+" - WHITESPACE " " - PAREN_EXPR - L_PAREN "(" + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + L_ANGLE "<" ERROR - QUESTION "?" + LIFETIME_IDENT "'a" + R_ANGLE ">" + R_PAREN ")" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + PAREN_EXPR + L_PAREN "(" PATH_EXPR PATH PATH_SEGMENT NAME_REF - IDENT "Sized" + IDENT "Copy" R_PAREN ")" R_ANGLE ">" ERROR SEMICOLON ";" + WHITESPACE "\n " + LET_STMT + LET_KW "let" + WHITESPACE " " + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + DYN_TRAIT_TYPE + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Box" + GENERIC_ARG_LIST + L_ANGLE "<" + TYPE_ARG + PAREN_TYPE + L_PAREN "(" + FOR_TYPE + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + GENERIC_ARG_LIST + L_ANGLE "<" + LIFETIME_ARG + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" + R_PAREN ")" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + TYPE_BOUND + L_PAREN "(" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Copy" + R_PAREN ")" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + TYPE_BOUND + L_PAREN "(" + QUESTION "?" 
+ PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Sized" + R_PAREN ")" + ERROR + R_ANGLE ">" + SEMICOLON ";" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 88: expected COMMA error 88: expected R_ANGLE error 121: expected SEMICOLON -error 121: expected expression +error 121: expected expression, item or let statement error 140: expected type error 141: expected R_PAREN error 141: expected COMMA -error 141: expected R_ANGLE -error 141: expected SEMICOLON +error 146: expected R_ANGLE error 146: expected SEMICOLON -error 146: expected expression -error 148: expected expression +error 146: expected expression, item or let statement +error 148: expected expression, item or let statement error 158: expected `|` error 158: expected COMMA error 165: expected expression error 168: expected expression error 179: expected expression -error 180: expected COMMA -error 190: expected EQ -error 190: expected expression -error 191: expected COMMA -error 204: expected `|` -error 204: expected COMMA -error 211: expected expression -error 214: expected expression -error 228: expected expression -error 229: expected R_PAREN -error 229: expected COMMA -error 236: expected expression -error 237: expected COMMA -error 237: expected expression -error 237: expected R_PAREN +error 180: expected SEMICOLON +error 215: expected R_ANGLE +error 235: expected SEMICOLON +error 235: expected expression, item or let statement diff --git a/crates/parser/test_data/parser/err/0025_nope.rast b/crates/parser/test_data/parser/err/0025_nope.rast index 6b49724ec9aa1..b6bc0088374fb 100644 --- a/crates/parser/test_data/parser/err/0025_nope.rast +++ b/crates/parser/test_data/parser/err/0025_nope.rast @@ -156,8 +156,7 @@ SOURCE_FILE PATH_SEGMENT NAME_REF IDENT "i32" - WHITESPACE " " - ERROR + WHITESPACE " " ERROR L_CURLY "{" R_CURLY "}" @@ -199,10 +198,8 @@ error 95: expected type error 95: expected COMMA error 96: expected field error 98: expected field declaration +error 371: expected R_PAREN error 371: expected COMMA -error 372: expected a type -error 372: expected R_PAREN -error 372: expected COMMA error 372: expected enum variant error 374: expected enum variant error 494: expected pattern diff --git a/crates/parser/test_data/parser/err/0042_weird_blocks.rast b/crates/parser/test_data/parser/err/0042_weird_blocks.rast index 9cea337ce9c49..1cdc6e6e71927 100644 --- a/crates/parser/test_data/parser/err/0042_weird_blocks.rast +++ b/crates/parser/test_data/parser/err/0042_weird_blocks.rast @@ -72,4 +72,4 @@ SOURCE_FILE error 24: expected existential, fn, trait or impl error 41: expected existential, fn, trait or impl error 56: expected a block -error 75: expected a loop +error 75: expected a loop or block diff --git a/crates/parser/test_data/parser/err/0048_double_fish.rast b/crates/parser/test_data/parser/err/0048_double_fish.rast index 3a05bfee1ee9b..207a5c24dffd4 100644 --- a/crates/parser/test_data/parser/err/0048_double_fish.rast +++ b/crates/parser/test_data/parser/err/0048_double_fish.rast @@ -12,7 +12,7 @@ SOURCE_FILE STMT_LIST L_CURLY "{" WHITESPACE "\n " - EXPR_STMT + BIN_EXPR PATH_EXPR PATH PATH_SEGMENT @@ -41,13 +41,14 @@ SOURCE_FILE COLON2 "::" ERROR L_ANGLE "<" - BIN_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "nope" - SHR ">>" + TYPE_ARG + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "nope" + R_ANGLE ">" + R_ANGLE ">" ERROR SEMICOLON ";" WHITESPACE "\n" @@ -114,8 +115,6 @@ SOURCE_FILE WHITESPACE "\n" error 30: expected identifier error 31: expected COMMA -error 31: expected R_ANGLE -error 31: 
expected SEMICOLON error 37: expected expression error 75: expected identifier error 76: expected SEMICOLON diff --git a/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast b/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast index 56cea4b15674c..ea5203fb96e00 100644 --- a/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast +++ b/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast @@ -23,6 +23,6 @@ SOURCE_FILE WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 22: expected a loop +error 22: expected a loop or block error 27: expected type error 27: expected `{` diff --git a/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast new file mode 100644 index 0000000000000..5d0fe859c2965 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast @@ -0,0 +1,77 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "bar" + COLON2 "::" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "bar" + ERROR + COLON ":" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + BIN_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "bar" + PLUS "+" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 25: expected identifier +error 39: expected COMMA +error 39: expected expression +error 55: expected expression diff --git a/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs new file mode 100644 index 0000000000000..0e7ac9cc30755 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs @@ -0,0 +1,5 @@ +fn main() { + foo(bar::); + foo(bar:); + foo(bar+); +} diff --git a/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast b/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast index e72df374d1bcf..ea50ad35d74db 100644 --- a/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast +++ b/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast @@ -49,5 +49,5 @@ SOURCE_FILE R_CURLY "}" WHITESPACE "\n" error 6: missing type for function parameter -error 6: expected COMMA +error 6: expected `,` error 16: missing type for function parameter diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs index 40125c2a512ad..cf637ec359a2d 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-api/src/version.rs @@ -120,17 +120,20 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]); // Last supported version is: // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632 - match version { - 5 | 6 => {} + let snappy_portion = match version { + 5 | 6 => &dot_rustc[8..], + 7 => { 
+ let len_bytes = &dot_rustc[8..12]; + let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize; + &dot_rustc[12..data_len + 12] + } _ => { return Err(io::Error::new( io::ErrorKind::InvalidData, format!("unsupported metadata version {version}"), )); } - } - - let snappy_portion = &dot_rustc[8..]; + }; let mut snappy_decoder = SnapDecoder::new(snappy_portion); diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index e3aa880d00583..f0f1900c78c56 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -24,7 +24,7 @@ crossbeam-channel = "0.5.5" dissimilar = "1.0.4" itertools = "0.10.5" scip = "0.1.1" -lsp-types = { version = "=0.93.2", features = ["proposed"] } +lsp-types = { version = "=0.94", features = ["proposed"] } parking_lot = "0.12.1" xflags = "0.3.0" oorandom = "11.1.3" diff --git a/crates/rust-analyzer/default_12483297303756020505_0.profraw b/crates/rust-analyzer/default_12483297303756020505_0.profraw new file mode 100644 index 0000000000000000000000000000000000000000..e49d7c144922d09aeca6dc5f31c8791b6c0939e2 GIT binary patch literal 25152 zcmeI4by$_l+wT|MA<`fz-3^MQAl)rUmjV)sfJ#VrH&W6donl~sGzik5bc&=BqMU_$ z&3-rQxPJWJ>pky#uIpT9{ju*g^L*y}+;h)8^UN~~x1X!cZC7W9V@$~J4-(y97BUL6 zz$74pZ+}^;i+@?Owd3EP#%?0<09^dTs#)HtPfsqz z_#%~-U_LFw@iey+_~C>Jbm$aOf9t@SLr`H~;(|z@xS;baqOa?Wm*QZeYQXLbQ98|| z!1#ip#kBitJi^yh4U=zPcf$~c@!86!dC)B_1;KO0JBJ2Lmc+C|*l+R4VZ52jpLso8 z`5K!=K|J99{_0jOSE_}ALc=`5qYJ~Coq@f`F~ z81Jign#YIrw+{%ikPziuwGy&%2dF5snLyaoc^`ay7^#63L@yXESDiIQM+av1N7 zfH#->rD5~hqY*7cRZ01f1ATJ^dMD^78v%b$VCUCQSvmp0x5Icsoj?2UXuvEvofO{y zmHa)v*)ZN20nhbRS70N?^coc*CgUqbCo~Tj9|Omu!vuoEkKE)@AA|^ivn_TcTVER(}r+I9cKEbvu44CDI^PV;CmUSlS^ zG$B!)sc67fG3bW~fgOzJHTp9zUh`NgxioGI%CAgbXNQ=YppXNb6t8+MF%u}%rgM1K$>4SL!EZ|ulH|-wBP{#tE5XSqO{8_(nQ>Ln6rIQly z%gevh=NRY$3-lL^HJW4TRJ8!#0^ay{&-{Hk6pj0aT3pup+|}MyfEGu#y7qo z{rh=x(r3pT{f~dlHThohEl;Dt(L}*S;15{O_ijBVC?~e+1^z5A{S{AMvp@S&z@Oap zCTv}DYcizhWc_b}@gVNNg7t^aWGLWGii>r{p{~0)o;g7!9Nz--|0F&q{-ci_eEpSG zUkUt4f#ZQb7~ityT;`t@EY^T`K7+r6Oi_g!#Q1kS72x=GxOhqsJh^jSVUrm6PY1{M z!}%|=I?`?79`k~naLe=law7y2jtBmO`Hn0e_$t#e#|ik)ihu|GEjASJ=BmBU4)sU= zb!6lMC?Q7lA)O|DtIN(HvF<8ql|c@tX+rWpZZo=6N>$ z&L1fR`oN#_kkQu*5tHgbpB%>X+Wy&peJ13 zz@LEcKZCC-#;EF(e!vapV-W%#%*Pv5PR@me^{YU?9RUyGbJ0nE=t0!CzvG{M22WXm zWUS{rFYQOP_3LDP_CUac^*QI4a3gXaYmW7vD~9Q{S$e+vR0 z_`i4~Jlks+yGzhJyZ zmUV=a+LB$Nqx5?}>;dCj;Pe5X>ayces5a3F{Lw(55BwQYl6$C*av>G)qA*^=?a%(_ zOhqNfGD+eljBAbBVH4xg0e=wi-|7zHr>d?dL;L%C|APtV&nVpU`m@ZVr!1)+M8Kcv z*+1te@L$s3U9I6O5)O!GI|3fWr*&A4r9{^)4Di`!@Po78lnWA!+Q9zV;|$*I{CHl| zxutTzYn;L75pS6OGBe)}vox=cqS397fQOvH-%2I;l1z7vKnk^J$*JTM(&)^; zj^`apTGljX0u#WuoWWaK=v0`f+Dibw=nTG(yOpEU|7t7XW6t1T9Cuqrr?u38{(2zb z!T9#8=wh#5SxssdIqHx#n!(mMgCClZK02yAymmj)l;o>o6FMsb9=uPXK6m9mc&h0P z@(1wF_fO9k;7>NDULtlJgAU*!F#Spdd~!-{{~ac_Nam@F@9m-c!_n!##%mLf2l?;a zU+#^we*E|P-vZ++AN<*$J<0_4|_SXan2!K8>jL-J|vwr2PnkXu&pBsS3gz^0d z_&UOcSbmLjf1ig;fBq|;Qa*py-*-~Tp#M}Pxb@;I-O2e(BaEl>J`OXDAd~kc(hEh$FCAurK{{cX??k*;I%Z0bmYJL z8*-Y5otr`|TOaL8{P@=75VuOdT}pz}4%7DyJL;~RO}AF!YFUUa8?O?-O?yRjq&dF3a5Ka2;uz=Hl}#ul!K4PDL#@oWKn z>}h=v{|S?JdT)VV3#b#n^D_m;^CIB=ONEn<68Px=?+oKL5b&8zt%tN2@!Nn`fbqTv z_!5<2n#1|N=G{wIE}X=Z4#ro)@gSbP_BU=2cRW{K;vS!GSjoe}gz+FQz=HUEqmEu2 zdp7q;>#&0%P^iU@pL?KVK++LpjWcH zk)H|TPY&~EI{7pY;=i#Q-8+k1kOlZn1pWX%*xkL14Naf%lZF(%L8O=r0(4zB28!KH#@$_Hoxf zJ>r3$|L>EPrW5$Wc;EEXJjiF?B`N2&yu2ju*u}doO+k2OFkUL-G!OJOZVgyI>Dtc{ z`cyY@a$cta^dFz*K|ZY38i+g4h~C#4*1goM#C09MhXA4Q&f4*Eo*xlEe6Ir=bq--Vfj4Jv3mFUM^#j; zz?y8^CRXf95dWgnJa}HX#gen9uC9yE`}nDsLC?cmV0?D*X&#)1*IqwRoIdYC0Oc!A 
zue=@3DV~EHU)%khcyK=A-ulb~fQbS4fDI6{Gj8fbRruj659|8XGRT^TL*PUQ}9X`KwH> z=($NBugCX%Ox>+{3OHr&ba{NaQsP%9N za4U22Lw`r%hyFaHq@<>#tjZ^%tfF`q4Kmt*^T`O7h_wprJ>80-JZjvXoUEZ3Zn&!9 z+RC%>J8o_Q%CiDH1KW|Dv${$>@j5M6UDdo@>^0eA9_J!6X{oBH#K-k;X~joG z;&O2rBR$umVVx4{!|)hiH<*Z;5GIs6ZtX)4zW2RqC6|rpqEuD8tSPFiP2*Y_r=zuJ z!$buSlj(GxoF&KoHD-c1+|b=_v9~iB^qFC-Xg*3^PY~M zVcUClWZQEs=d#&sVg1_p#7^@q2`RA$w5B8t>PSBq-}0hdZD?%#!cl<3y<8@pnA_F% zNKbgXOXfXcuA2k}0X0|avDl@FO~<=;wSAccL&pNV&o!`?bS>R~$v6#5Xw^%&mfx!f!zOIRT)atVh z^f_t;G!^<=h02GLMJ~E-0x|0f%(nw#Lby@4vW3@QM*5R8Tzo@EH2D zopQ}q(A{REd&b^_4$tupLo7pU1~iKXV)NKWAHRs^e`ZQ}vw_ZzoG;-0MHSM=?OKZY zTbP*zO}oG9%@0S8$QuG5OG5v5)0(q-^JG*hxzEHW?+>AkCPg^b5t7$v61^bgmQIOl z{Uok{wm+$TA-J|nO3c5%SDaB@_v^TI-RMj9mxs&Mq;$%|R#}N2@-NfkL@uj0q!+Ux zvzI!2DE!jVi1i8GL#pQ$*`Y&Og4TJ8xxkst&um}QJ!%?59@^iq656jbxG_>wN31^4 z5+OaRGvIPOUN;g$9Y^>^L6%|OvPw+y!i08)zEk46XXT~b!s(Xrxf60)A@cYASKf7h z(xy)oiw(0K9LOBSi=ti)IzGCCbrAQwQzJoBo1UMhDIiZb`pv{#S$%QO+k&j|tb8(p zlE`1KYMoXY?6b-m_@520r0VWrmA54aon!l~oP@y|I?_u^Gy{ zEI(>bHkd`y&~2b)e)Ffav`5dcTlaD20qz^7y`U=BTeg9 zRB$xdh!UTXhoiD@R#DnXhE`W;Rkg2mJwcvBUZ+U;x`Nwk#mh3n7~GF1@~U_Ai0|Rh z9`+0C(j9r00#gn3JMJ0hgp8aVEJUco7sc^RHUDFkBudoAw;W8> zJvJ%g8-h8uOx7iXR9sMM7?7W6=GNNM$iKR?LgRa_s6N0|ST2P(giQTBmTcqGmQnJC zCF;`1^e<$KUc5nkQap!R-v;VNXub56u9hPEoxD4BD`O+`;X*W^zwGiBN2YSip^MDQ zgyD!VscIW{V}xDuxW6&|9Oe0LV_d{s?xd9}tv0TR;YNXTDB%q1HPLHQ6&u42-uWE` z`43~WE;fy3)}~xgcp&m+;1$U|X*qrzH+N=l+5Dh50fFy{_up3Ll<1UcXfj)$A;|Doa z@wAD0v ServerCapabilities { ServerCapabilities { - position_encoding: if supports_utf8(config.caps()) { - Some(PositionEncodingKind::UTF8) - } else { - None - }, + position_encoding: Some(match negotiated_encoding(config.caps()) { + PositionEncoding::Utf8 => PositionEncodingKind::UTF8, + PositionEncoding::Wide(wide) => match wide { + WideEncoding::Utf16 => PositionEncodingKind::UTF16, + WideEncoding::Utf32 => PositionEncodingKind::UTF32, + }, + }), text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { open_close: Some(true), change: Some(TextDocumentSyncKind::INCREMENTAL), @@ -134,6 +138,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { resolve_provider: Some(true), }, ))), + inline_value_provider: None, experimental: Some(json!({ "externalDocs": true, "hoverRange": true, diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index 60a7f99ccdb85..3fc1aa4eaeb4a 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -11,6 +11,7 @@ use ide::{ use ide_db::LineIndexDatabase; use ide_db::base_db::salsa::{self, ParallelDatabase}; +use ide_db::line_index::WideEncoding; use lsp_types::{self, lsif}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace}; use vfs::{AbsPathBuf, Vfs}; @@ -127,7 +128,7 @@ impl LsifManager<'_> { let line_index = self.db.line_index(file_id); let line_index = LineIndex { index: line_index, - encoding: PositionEncoding::Utf16, + encoding: PositionEncoding::Wide(WideEncoding::Utf16), endings: LineEndings::Unix, }; let range_id = self.add_vertex(lsif::Vertex::Range { @@ -249,7 +250,7 @@ impl LsifManager<'_> { let line_index = self.db.line_index(file_id); let line_index = LineIndex { index: line_index, - encoding: PositionEncoding::Utf16, + encoding: PositionEncoding::Wide(WideEncoding::Utf16), endings: LineEndings::Unix, }; let result = folds diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index be09938c2c4a4..f609a50a05faf 100644 --- a/crates/rust-analyzer/src/config.rs +++ 
b/crates/rust-analyzer/src/config.rs @@ -33,7 +33,7 @@ use crate::{ caps::completion_item_edit_resolve, diagnostics::DiagnosticsMapConfig, line_index::PositionEncoding, - lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope}, + lsp_ext::{self, negotiated_encoding, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope}, }; mod patch_old_style; @@ -999,11 +999,7 @@ impl Config { } pub fn position_encoding(&self) -> PositionEncoding { - if supports_utf8(&self.caps) { - PositionEncoding::Utf8 - } else { - PositionEncoding::Utf16 - } + negotiated_encoding(&self.caps) } fn experimental(&self, index: &'static str) -> bool { diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 55b89019b47a8..415fa4e02f20c 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -3,6 +3,7 @@ use std::collections::HashMap; use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan}; +use ide_db::line_index::WideEncoding; use itertools::Itertools; use stdx::format_to; use vfs::{AbsPath, AbsPathBuf}; @@ -95,7 +96,8 @@ fn position( let mut char_offset = 0; let len_func = match position_encoding { PositionEncoding::Utf8 => char::len_utf8, - PositionEncoding::Utf16 => char::len_utf16, + PositionEncoding::Wide(WideEncoding::Utf16) => char::len_utf16, + PositionEncoding::Wide(WideEncoding::Utf32) => |_| 1, }; for c in line.text.chars() { char_offset += 1; diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs index 2dbb14fcd9a67..50af38cd6fe37 100644 --- a/crates/rust-analyzer/src/from_proto.rs +++ b/crates/rust-analyzer/src/from_proto.rs @@ -1,7 +1,10 @@ //! Conversion lsp_types types to rust-analyzer specific ones. 
use anyhow::format_err; -use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineColUtf16}; -use ide_db::base_db::{FileId, FilePosition, FileRange}; +use ide::{Annotation, AnnotationKind, AssistKind, LineCol}; +use ide_db::{ + base_db::{FileId, FilePosition, FileRange}, + line_index::WideLineCol, +}; use syntax::{TextRange, TextSize}; use vfs::AbsPathBuf; @@ -26,9 +29,9 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result { pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result { let line_col = match line_index.encoding { PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character }, - PositionEncoding::Utf16 => { - let line_col = LineColUtf16 { line: position.line, col: position.character }; - line_index.index.to_utf8(line_col) + PositionEncoding::Wide(enc) => { + let line_col = WideLineCol { line: position.line, col: position.character }; + line_index.index.to_utf8(enc, line_col) } }; let text_size = diff --git a/crates/rust-analyzer/src/line_index.rs b/crates/rust-analyzer/src/line_index.rs index 2945dba12f255..791cd931d42a6 100644 --- a/crates/rust-analyzer/src/line_index.rs +++ b/crates/rust-analyzer/src/line_index.rs @@ -7,9 +7,12 @@ use std::sync::Arc; +use ide_db::line_index::WideEncoding; + +#[derive(Clone, Copy)] pub enum PositionEncoding { Utf8, - Utf16, + Wide(WideEncoding), } pub(crate) struct LineIndex { diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 08b2c837de370..e33589cc53696 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -2,6 +2,7 @@ use std::{collections::HashMap, path::PathBuf}; +use ide_db::line_index::WideEncoding; use lsp_types::request::Request; use lsp_types::PositionEncodingKind; use lsp_types::{ @@ -10,6 +11,8 @@ use lsp_types::{ }; use serde::{Deserialize, Serialize}; +use crate::line_index::PositionEncoding; + pub enum AnalyzerStatus {} impl Request for AnalyzerStatus { @@ -481,16 +484,22 @@ pub(crate) enum CodeLensResolveData { References(lsp_types::TextDocumentPositionParams), } -pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool { - match &caps.general { - Some(general) => general - .position_encodings - .as_deref() - .unwrap_or_default() - .iter() - .any(|it| it == &PositionEncodingKind::UTF8), - _ => false, +pub fn negotiated_encoding(caps: &lsp_types::ClientCapabilities) -> PositionEncoding { + let client_encodings = match &caps.general { + Some(general) => general.position_encodings.as_deref().unwrap_or_default(), + None => &[], + }; + + for enc in client_encodings { + if enc == &PositionEncodingKind::UTF8 { + return PositionEncoding::Utf8; + } else if enc == &PositionEncodingKind::UTF32 { + return PositionEncoding::Wide(WideEncoding::Utf32); + } + // NB: intentionally prefer just about anything else to utf-16. 
} + + PositionEncoding::Wide(WideEncoding::Utf16) } pub enum MoveItem {} diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index baa77a005e226..30f1c53c198f3 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs @@ -161,6 +161,7 @@ impl GlobalState { } pub(crate) fn apply_document_changes( + encoding: PositionEncoding, file_contents: impl FnOnce() -> String, mut content_changes: Vec, ) -> String { @@ -192,9 +193,9 @@ pub(crate) fn apply_document_changes( let mut line_index = LineIndex { // the index will be overwritten in the bottom loop's first iteration index: Arc::new(ide::LineIndex::new(&text)), - // We don't care about line endings or offset encoding here. + // We don't care about line endings here. endings: LineEndings::Unix, - encoding: PositionEncoding::Utf16, + encoding, }; // The changes we got must be applied sequentially, but can cross lines so we @@ -256,6 +257,7 @@ pub(crate) fn all_edits_are_disjoint( #[cfg(test)] mod tests { + use ide_db::line_index::WideEncoding; use lsp_types::{ CompletionItem, CompletionTextEdit, InsertReplaceEdit, Position, Range, TextDocumentContentChangeEvent, @@ -278,9 +280,11 @@ mod tests { }; } - let text = apply_document_changes(|| String::new(), vec![]); + let encoding = PositionEncoding::Wide(WideEncoding::Utf16); + let text = apply_document_changes(encoding, || String::new(), vec![]); assert_eq!(text, ""); let text = apply_document_changes( + encoding, || text, vec![TextDocumentContentChangeEvent { range: None, @@ -289,39 +293,49 @@ mod tests { }], ); assert_eq!(text, "the"); - let text = apply_document_changes(|| text, c![0, 3; 0, 3 => " quick"]); + let text = apply_document_changes(encoding, || text, c![0, 3; 0, 3 => " quick"]); assert_eq!(text, "the quick"); - let text = apply_document_changes(|| text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]); + let text = + apply_document_changes(encoding, || text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]); assert_eq!(text, "quick foxes"); - let text = apply_document_changes(|| text, c![0, 11; 0, 11 => "\ndream"]); + let text = apply_document_changes(encoding, || text, c![0, 11; 0, 11 => "\ndream"]); assert_eq!(text, "quick foxes\ndream"); - let text = apply_document_changes(|| text, c![1, 0; 1, 0 => "have "]); + let text = apply_document_changes(encoding, || text, c![1, 0; 1, 0 => "have "]); assert_eq!(text, "quick foxes\nhave dream"); let text = apply_document_changes( + encoding, || text, c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"], ); assert_eq!(text, "the quick foxes\nhave quiet dreams\n"); - let text = apply_document_changes(|| text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]); + let text = apply_document_changes( + encoding, + || text, + c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"], + ); assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n"); let text = apply_document_changes( + encoding, || text, c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"], ); assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n"); - let text = apply_document_changes(|| text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]); + let text = + apply_document_changes(encoding, || text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]); assert_eq!(text, "the quick \nthey have quiet dreams\n"); let text = String::from("❤️"); - let text = apply_document_changes(|| text, c![0, 0; 0, 0 => "a"]); + let text = apply_document_changes(encoding, || text, c![0, 0; 0, 0 => 
"a"]); assert_eq!(text, "a❤️"); let text = String::from("a\nb"); - let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]); + let text = + apply_document_changes(encoding, || text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]); assert_eq!(text, "adcb"); let text = String::from("a\nb"); - let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]); + let text = + apply_document_changes(encoding, || text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]); assert_eq!(text, "ațc\ncb"); } diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 346a74e270f99..d1e38b33c7de2 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -831,6 +831,7 @@ impl GlobalState { let vfs = &mut this.vfs.write().0; let file_id = vfs.file_id(&path).unwrap(); let text = apply_document_changes( + this.config.position_encoding(), || std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(), params.content_changes, ); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 5ac5af94f5aef..abce0d7378277 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -268,7 +268,10 @@ impl GlobalState { ] }) }) - .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None }) + .map(|glob_pattern| lsp_types::FileSystemWatcher { + glob_pattern: lsp_types::GlobPattern::String(glob_pattern), + kind: None, + }) .collect(), }; let registration = lsp_types::Registration { diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 5bdc1bf8d9bb7..92029dc1de78f 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -31,8 +31,8 @@ pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::P let line_col = line_index.index.line_col(offset); match line_index.encoding { PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col), - PositionEncoding::Utf16 => { - let line_col = line_index.index.to_utf16(line_col); + PositionEncoding::Wide(enc) => { + let line_col = line_index.index.to_wide(enc, line_col); lsp_types::Position::new(line_col.line, line_col.col) } } @@ -212,7 +212,7 @@ pub(crate) fn completion_items( tdpp: lsp_types::TextDocumentPositionParams, items: Vec, ) -> Vec { - let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default(); + let max_relevance = items.iter().map(|it| it.relevance.score()).max().unwrap_or_default(); let mut res = Vec::with_capacity(items.len()); for item in items { completion_item(&mut res, config, line_index, &tdpp, max_relevance, item); @@ -235,22 +235,26 @@ fn completion_item( item: CompletionItem, ) { let insert_replace_support = config.insert_replace_support().then_some(tdpp.position); + let ref_match = item.ref_match(); + let lookup = item.lookup().to_string(); + let mut additional_text_edits = Vec::new(); // LSP does not allow arbitrary edits in completion, so we have to do a // non-trivial mapping here. 
let text_edit = { let mut text_edit = None; - let source_range = item.source_range(); - for indel in item.text_edit().iter() { + let source_range = item.source_range; + for indel in item.text_edit { if indel.delete.contains_range(source_range) { + // Extract this indel as the main edit text_edit = Some(if indel.delete == source_range { self::completion_text_edit(line_index, insert_replace_support, indel.clone()) } else { assert!(source_range.end() == indel.delete.end()); let range1 = TextRange::new(indel.delete.start(), source_range.start()); let range2 = source_range; - let indel1 = Indel::replace(range1, String::new()); + let indel1 = Indel::delete(range1); let indel2 = Indel::replace(range2, indel.insert.clone()); additional_text_edits.push(self::text_edit(line_index, indel1)); self::completion_text_edit(line_index, insert_replace_support, indel2) @@ -264,23 +268,23 @@ fn completion_item( text_edit.unwrap() }; - let insert_text_format = item.is_snippet().then_some(lsp_types::InsertTextFormat::SNIPPET); - let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]); - let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints { + let insert_text_format = item.is_snippet.then_some(lsp_types::InsertTextFormat::SNIPPET); + let tags = item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]); + let command = if item.trigger_call_info && config.client_commands().trigger_parameter_hints { Some(command::trigger_parameter_hints()) } else { None }; let mut lsp_item = lsp_types::CompletionItem { - label: item.label().to_string(), - detail: item.detail().map(|it| it.to_string()), - filter_text: Some(item.lookup().to_string()), - kind: Some(completion_item_kind(item.kind())), + label: item.label.to_string(), + detail: item.detail.map(|it| it.to_string()), + filter_text: Some(lookup), + kind: Some(completion_item_kind(item.kind)), text_edit: Some(text_edit), additional_text_edits: Some(additional_text_edits), - documentation: item.documentation().map(documentation), - deprecated: Some(item.deprecated()), + documentation: item.documentation.map(documentation), + deprecated: Some(item.deprecated), tags, command, insert_text_format, @@ -294,12 +298,13 @@ fn completion_item( }); } - set_score(&mut lsp_item, max_relevance, item.relevance()); + set_score(&mut lsp_item, max_relevance, item.relevance); if config.completion().enable_imports_on_the_fly { - if let imports @ [_, ..] 
= item.imports_to_add() { - let imports: Vec<_> = imports - .iter() + if !item.import_to_add.is_empty() { + let imports: Vec<_> = item + .import_to_add + .into_iter() .filter_map(|import_edit| { let import_path = &import_edit.import_path; let import_name = import_path.segments().last()?; @@ -316,18 +321,13 @@ fn completion_item( } } - if let Some((mutability, offset, relevance)) = item.ref_match() { - let mut lsp_item_with_ref = lsp_item.clone(); + if let Some((label, indel, relevance)) = ref_match { + let mut lsp_item_with_ref = lsp_types::CompletionItem { label, ..lsp_item.clone() }; + lsp_item_with_ref + .additional_text_edits + .get_or_insert_with(Default::default) + .push(self::text_edit(line_index, indel)); set_score(&mut lsp_item_with_ref, max_relevance, relevance); - lsp_item_with_ref.label = - format!("&{}{}", mutability.as_keyword_for_ref(), lsp_item_with_ref.label); - lsp_item_with_ref.additional_text_edits.get_or_insert_with(Default::default).push( - self::text_edit( - line_index, - Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())), - ), - ); - acc.push(lsp_item_with_ref); }; @@ -766,6 +766,7 @@ pub(crate) fn folding_range( end_line, end_character: None, kind, + collapsed_text: None, } } else { lsp_types::FoldingRange { @@ -774,6 +775,7 @@ pub(crate) fn folding_range( end_line: range.end.line, end_character: Some(range.end.character), kind, + collapsed_text: None, } } } @@ -1360,7 +1362,7 @@ pub(crate) mod command { pub(crate) fn trigger_parameter_hints() -> lsp_types::Command { lsp_types::Command { title: "triggerParameterHints".into(), - command: "editor.action.triggerParameterHints".into(), + command: "rust-analyzer.triggerParameterHints".into(), arguments: None, } } @@ -1429,7 +1431,7 @@ fn main() { let line_index = LineIndex { index: Arc::new(ide::LineIndex::new(text)), endings: LineEndings::Unix, - encoding: PositionEncoding::Utf16, + encoding: PositionEncoding::Utf8, }; let converted: Vec = folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect(); diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 5e3e19d44d738..587d640969aac 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -22,7 +22,7 @@ use lsp_types::{ notification::DidOpenTextDocument, request::{ CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest, - WillRenameFiles, WorkspaceSymbol, + WillRenameFiles, WorkspaceSymbolRequest, }, CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams, DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams, @@ -1095,5 +1095,5 @@ pub fn bar() {} .server() .wait_until_workspace_is_loaded(); - server.request::(Default::default(), json!([])); + server.request::(Default::default(), json!([])); } diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index b7275df0f4019..037fc89ace090 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -107,6 +107,7 @@ impl<'a> Project<'a> { did_change_watched_files: Some( lsp_types::DidChangeWatchedFilesClientCapabilities { dynamic_registration: Some(true), + relative_pattern_support: None, }, ), ..Default::default() diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index bd24d7d28bac9..5639aaf57cd97 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ 
-11,6 +11,7 @@ pub mod hash; pub mod process; pub mod panic_context; pub mod non_empty_vec; +pub mod rand; pub use always_assert::{always, never}; diff --git a/crates/stdx/src/rand.rs b/crates/stdx/src/rand.rs new file mode 100644 index 0000000000000..64aa57eae09cd --- /dev/null +++ b/crates/stdx/src/rand.rs @@ -0,0 +1,21 @@ +//! We don't use `rand`, as that's too many things for us. +//! +//! We currently use oorandom instead, but it's missing these two utilities. +//! Perhaps we should switch to `fastrand`, or our own small PRNG, it's not like +//! we need anything more complicated than xor-shift. + +pub fn shuffle(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) { + let mut remaining = slice.len() - 1; + while remaining > 0 { + let index = rand_index(remaining); + slice.swap(remaining, index); + remaining -= 1; + } +} + +pub fn seed() -> u64 { + use std::collections::hash_map::RandomState; + use std::hash::{BuildHasher, Hasher}; + + RandomState::new().build_hasher().finish() +} diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 84c66b27e69fa..6f57cbad66b16 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -186,7 +186,7 @@ impl SourceFile { /// ``` #[macro_export] macro_rules! match_ast { - (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + (match $node:ident { $($tt:tt)* }) => { $crate::match_ast!(match ($node) { $($tt)* }) }; (match ($node:expr) { $( $( $path:ident )::+ ($it:pat) => $res:expr, )* diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index a07cf036e0604..895de5798ac30 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -119,7 +119,7 @@ See [#93](https://github.com/rust-lang/rust-analyzer/pull/93) for an example PR **Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP. This is important because it is possible to make useful tooling using only the syntax tree. Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust. -See also https://web.stanford.edu/~mlfbrown/paper.pdf. +See also https://mlfbrown.com/paper.pdf. You can view the `syntax` crate as an entry point to rust-analyzer. `syntax` crate is an **API Boundary**. diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index a794e866181df..c3623a5cc46cf 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@