diff --git a/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml index 2a842f3b3114d..37cf5f3726b25 100644 --- a/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml +++ b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml @@ -11,10 +11,11 @@ jobs: if: github.repository == 'rust-lang/rust-analyzer' uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main with: + github-app-id: ${{ vars.APP_CLIENT_ID }} zulip-stream-id: 185405 zulip-bot-email: "rust-analyzer-ci-bot@rust-lang.zulipchat.com" pr-base-branch: master branch-name: rustc-pull secrets: zulip-api-token: ${{ secrets.ZULIP_API_TOKEN }} - token: ${{ secrets.GITHUB_TOKEN }} + github-app-secret: ${{ secrets.APP_PRIVATE_KEY }} diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 7d03300c22167..5a29379ba4818 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -23,6 +23,12 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" +[[package]] +name = "anstyle" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" + [[package]] name = "anyhow" version = "1.0.98" @@ -44,6 +50,15 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + [[package]] name = "autocfg" version = "1.4.0" @@ -119,6 +134,12 @@ version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa" +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "camino" version = "1.1.10" @@ -287,6 +308,40 @@ dependencies = [ "tracing", ] +[[package]] +name = "clap" +version = "4.5.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed87a9d530bb41a67537289bafcac159cb3ee28460e0a4571123d2a778a6a882" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64f4f3f3c77c94aff3c7e9aac9a2ca1974a5adf392a8bb751e827d6d127ab966" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" + +[[package]] +name = "cobs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1" +dependencies = [ + "thiserror 2.0.12", +] + [[package]] name = "countme" version = "3.0.1" @@ -308,6 +363,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" + [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -565,6 +626,15 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.14.5" @@ -591,6 +661,20 @@ dependencies = [ "hashbrown 0.15.4", ] +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version", + "serde", + "spin", + "stable_deref_trait", +] + [[package]] name = "hermit-abi" version = "0.5.2" @@ -1561,6 +1645,17 @@ version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" +[[package]] +name = "postcard" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24" +dependencies = [ + "cobs", + "heapless", + "serde", +] + [[package]] name = "potential_utf" version = "0.1.2" @@ -1608,6 +1703,7 @@ dependencies = [ "ra-ap-rustc_lexer 0.123.0", "span", "syntax-bridge", + "temp-dir", "tt", ] @@ -1615,6 +1711,8 @@ dependencies = [ name = "proc-macro-srv-cli" version = "0.0.0" dependencies = [ + "clap", + "postcard", "proc-macro-api", "proc-macro-srv", "tt", @@ -1991,6 +2089,15 @@ dependencies = [ "smallvec", ] +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + [[package]] name = "ryu" version = "1.0.20" @@ -2208,6 +2315,15 @@ dependencies = [ "vfs", ] +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index ad17f1730bef7..b8eadb608fea5 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -206,6 +206,7 @@ impl EditionedFileId { #[salsa_macros::input(debug)] pub struct FileText { + #[returns(ref)] pub text: Arc, pub file_id: vfs::FileId, } @@ -357,7 +358,7 @@ fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse Option<&[SyntaxError]> { diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs index aed00aa9fc447..f83c21eb8d64a 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs @@ -134,10 +134,10 @@ fn next_cfg_expr(it: &mut tt::iter::TtIter<'_, S>) -> Option { }; // Eat comma separator - if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = it.peek() { - if punct.char == ',' { - it.next(); - } + if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = 
it.peek() + && punct.char == ',' + { + it.next(); } Some(ret) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index b509e69b0d37b..53250510f875c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -377,10 +377,10 @@ fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option { let mut align = None; if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { tts.next(); - if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() { - if let Ok(a) = lit.symbol.as_str().parse() { - align = Align::from_bytes(a).ok(); - } + if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() + && let Ok(a) = lit.symbol.as_str().parse() + { + align = Align::from_bytes(a).ok(); } } ReprOptions { align, ..Default::default() } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index abd1382801dda..3b9281ffb9c12 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -1487,13 +1487,13 @@ impl ExprCollector<'_> { ast::Expr::UnderscoreExpr(_) => self.alloc_pat_from_expr(Pat::Wild, syntax_ptr), ast::Expr::ParenExpr(e) => { // We special-case `(..)` for consistency with patterns. - if let Some(ast::Expr::RangeExpr(range)) = e.expr() { - if range.is_range_full() { - return Some(self.alloc_pat_from_expr( - Pat::Tuple { args: Box::default(), ellipsis: Some(0) }, - syntax_ptr, - )); - } + if let Some(ast::Expr::RangeExpr(range)) = e.expr() + && range.is_range_full() + { + return Some(self.alloc_pat_from_expr( + Pat::Tuple { args: Box::default(), ellipsis: Some(0) }, + syntax_ptr, + )); } return e.expr().and_then(|expr| self.maybe_collect_expr_as_pat(&expr)); } @@ -2569,19 +2569,18 @@ impl ExprCollector<'_> { } } RibKind::MacroDef(macro_id) => { - if let Some((parent_ctx, label_macro_id)) = hygiene_info { - if label_macro_id == **macro_id { - // A macro is allowed to refer to labels from before its declaration. - // Therefore, if we got to the rib of its declaration, give up its hygiene - // and use its parent expansion. - - hygiene_id = - HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db)); - hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| { - let expansion = self.db.lookup_intern_macro_call(expansion.into()); - (parent_ctx.parent(self.db), expansion.def) - }); - } + if let Some((parent_ctx, label_macro_id)) = hygiene_info + && label_macro_id == **macro_id + { + // A macro is allowed to refer to labels from before its declaration. + // Therefore, if we got to the rib of its declaration, give up its hygiene + // and use its parent expansion. 
+ + hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db)); + hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| { + let expansion = self.db.lookup_intern_macro_call(expansion.into()); + (parent_ctx.parent(self.db), expansion.def) + }); } } _ => {} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs index 3bc4afb5c8ac3..230d1c9346362 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs @@ -259,10 +259,10 @@ impl ExprCollector<'_> { } }; - if let Some(operand_idx) = operand_idx { - if let Some(position_span) = to_span(arg.position_span) { - mappings.push((position_span, operand_idx)); - } + if let Some(operand_idx) = operand_idx + && let Some(position_span) = to_span(arg.position_span) + { + mappings.push((position_span, operand_idx)); } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs index be006c98a5827..579465e10f932 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs @@ -211,16 +211,17 @@ pub(super) fn lower_path( // Basically, even in rustc it is quite hacky: // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 // We follow what it did anyway :) - if segments.len() == 1 && kind == PathKind::Plain { - if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range()); - if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) { - if collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner { - kind = match resolve_crate_root(collector.db, syn_ctxt) { - Some(crate_root) => PathKind::DollarCrate(crate_root), - None => PathKind::Crate, - } - } + if segments.len() == 1 + && kind == PathKind::Plain + && let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) + { + let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range()); + if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) + && collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner + { + kind = match resolve_crate_root(collector.db, syn_ctxt) { + Some(crate_root) => PathKind::DollarCrate(crate_root), + None => PathKind::Crate, } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs index f1b011333d94e..b81dcc1fe96df 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs @@ -900,14 +900,12 @@ impl Printer<'_> { let field_name = arg.name.display(self.db, edition).to_string(); let mut same_name = false; - if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] { - if let Binding { name, mode: BindingAnnotation::Unannotated, .. } = + if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] + && let Binding { name, mode: BindingAnnotation::Unannotated, .. 
} = &self.store.assert_expr_only().bindings[*id] - { - if name.as_str() == field_name { - same_name = true; - } - } + && name.as_str() == field_name + { + same_name = true; } w!(p, "{}", field_name); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index dccfff002f23d..faa0ef8ceec7b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -107,11 +107,11 @@ struct FindPathCtx<'db> { /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option { // - if the item is a module, jump straight to module search - if !ctx.is_std_item { - if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { - return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) - .map(|choice| choice.path); - } + if !ctx.is_std_item + && let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item + { + return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) + .map(|choice| choice.path); } let may_be_in_scope = match ctx.prefix { @@ -226,15 +226,15 @@ fn find_path_for_module( } // - if the module can be referenced as self, super or crate, do that - if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) { - if ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate { - return Some(Choice { - path: ModPath::from_segments(kind, None), - path_text_len: path_kind_len(kind), - stability: Stable, - prefer_due_to_prelude: false, - }); - } + if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) + && (ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate) + { + return Some(Choice { + path: ModPath::from_segments(kind, None), + path_text_len: path_kind_len(kind), + stability: Stable, + prefer_due_to_prelude: false, + }); } // - if the module is in the prelude, return it by that path @@ -604,29 +604,29 @@ fn find_local_import_locations( &def_map[module.local_id] }; - if let Some((name, vis, declared)) = data.scope.name_of(item) { - if vis.is_visible_from(db, from) { - let is_pub_or_explicit = match vis { - Visibility::Module(_, VisibilityExplicitness::Explicit) => { - cov_mark::hit!(explicit_private_imports); - true - } - Visibility::Module(_, VisibilityExplicitness::Implicit) => { - cov_mark::hit!(discount_private_imports); - false - } - Visibility::PubCrate(_) => true, - Visibility::Public => true, - }; - - // Ignore private imports unless they are explicit. these could be used if we are - // in a submodule of this module, but that's usually not - // what the user wants; and if this module can import - // the item and we're a submodule of it, so can we. - // Also this keeps the cached data smaller. - if declared || is_pub_or_explicit { - cb(visited_modules, name, module); + if let Some((name, vis, declared)) = data.scope.name_of(item) + && vis.is_visible_from(db, from) + { + let is_pub_or_explicit = match vis { + Visibility::Module(_, VisibilityExplicitness::Explicit) => { + cov_mark::hit!(explicit_private_imports); + true } + Visibility::Module(_, VisibilityExplicitness::Implicit) => { + cov_mark::hit!(discount_private_imports); + false + } + Visibility::PubCrate(_) => true, + Visibility::Public => true, + }; + + // Ignore private imports unless they are explicit. 
these could be used if we are + // in a submodule of this module, but that's usually not + // what the user wants; and if this module can import + // the item and we're a submodule of it, so can we. + // Also this keeps the cached data smaller. + if declared || is_pub_or_explicit { + cb(visited_modules, name, module); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index efa4399468501..8f526d1a2369a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -510,12 +510,11 @@ impl ItemScope { id: AttrId, idx: usize, ) { - if let Some(derives) = self.derive_macros.get_mut(&adt) { - if let Some(DeriveMacroInvocation { derive_call_ids, .. }) = + if let Some(derives) = self.derive_macros.get_mut(&adt) + && let Some(DeriveMacroInvocation { derive_call_ids, .. }) = derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id) - { - derive_call_ids[idx] = Some(call); - } + { + derive_call_ids[idx] = Some(call); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 5ab61c89394bf..032b287cd6a82 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -83,12 +83,12 @@ impl<'a> Ctx<'a> { .flat_map(|item| self.lower_mod_item(&item)) .collect(); - if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() { - if let Some(call) = tail_macro.macro_call() { - cov_mark::hit!(macro_stmt_with_trailing_macro_expr); - if let Some(mod_item) = self.lower_mod_item(&call.into()) { - self.top_level.push(mod_item); - } + if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() + && let Some(call) = tail_macro.macro_call() + { + cov_mark::hit!(macro_stmt_with_trailing_macro_expr); + if let Some(mod_item) = self.lower_mod_item(&call.into()) { + self.top_level.push(mod_item); } } @@ -112,12 +112,11 @@ impl<'a> Ctx<'a> { _ => None, }) .collect(); - if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() { - if let Some(call) = expr.macro_call() { - if let Some(mod_item) = self.lower_mod_item(&call.into()) { - self.top_level.push(mod_item); - } - } + if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() + && let Some(call) = expr.macro_call() + && let Some(mod_item) = self.lower_mod_item(&call.into()) + { + self.top_level.push(mod_item); } self.tree.vis.arena = self.visibilities.into_iter().collect(); self.tree.top_level = self.top_level.into_boxed_slice(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 750308026eec6..d431f2140165e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -218,10 +218,10 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option for (_, module_data) in crate_def_map.modules() { for def in module_data.scope.declarations() { - if let ModuleDefId::TraitId(trait_) = def { - if db.attrs(trait_.into()).has_doc_notable_trait() { - traits.push(trait_); - } + if let ModuleDefId::TraitId(trait_) = def + && db.attrs(trait_.into()).has_doc_notable_trait() + { + traits.push(trait_); } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs 
b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 5e95b061399af..e8ae499d27b26 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -221,46 +221,42 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream _ => None, }; - if let Some(src) = src { - if let Some(file_id) = src.file_id.macro_file() { - if let MacroKind::Derive - | MacroKind::DeriveBuiltIn - | MacroKind::Attr - | MacroKind::AttrBuiltIn = file_id.kind(&db) - { - let call = file_id.call_node(&db); - let mut show_spans = false; - let mut show_ctxt = false; - for comment in - call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) - { - show_spans |= comment.to_string().contains("+spans"); - show_ctxt |= comment.to_string().contains("+syntaxctxt"); - } - let pp = pretty_print_macro_expansion( - src.value, - db.span_map(src.file_id).as_ref(), - show_spans, - show_ctxt, - ); - format_to!(expanded_text, "\n{}", pp) - } + if let Some(src) = src + && let Some(file_id) = src.file_id.macro_file() + && let MacroKind::Derive + | MacroKind::DeriveBuiltIn + | MacroKind::Attr + | MacroKind::AttrBuiltIn = file_id.kind(&db) + { + let call = file_id.call_node(&db); + let mut show_spans = false; + let mut show_ctxt = false; + for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) { + show_spans |= comment.to_string().contains("+spans"); + show_ctxt |= comment.to_string().contains("+syntaxctxt"); } + let pp = pretty_print_macro_expansion( + src.value, + db.span_map(src.file_id).as_ref(), + show_spans, + show_ctxt, + ); + format_to!(expanded_text, "\n{}", pp) } } for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); - if let Some(macro_file) = src.file_id.macro_file() { - if let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db) { - let pp = pretty_print_macro_expansion( - src.value.syntax().clone(), - db.span_map(macro_file.into()).as_ref(), - false, - false, - ); - format_to!(expanded_text, "\n{}", pp) - } + if let Some(macro_file) = src.file_id.macro_file() + && let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db) + { + let pp = pretty_print_macro_expansion( + src.value.syntax().clone(), + db.span_map(macro_file.into()).as_ref(), + false, + false, + ); + format_to!(expanded_text, "\n{}", pp) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 0c3274d849ad8..267c4451b9d71 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -261,20 +261,20 @@ impl<'db> DefCollector<'db> { // Process other crate-level attributes. 
for attr in &*attrs { - if let Some(cfg) = attr.cfg() { - if self.cfg_options.check(&cfg) == Some(false) { - process = false; - break; - } + if let Some(cfg) = attr.cfg() + && self.cfg_options.check(&cfg) == Some(false) + { + process = false; + break; } let Some(attr_name) = attr.path.as_ident() else { continue }; match () { () if *attr_name == sym::recursion_limit => { - if let Some(limit) = attr.string_value() { - if let Ok(limit) = limit.as_str().parse() { - crate_data.recursion_limit = Some(limit); - } + if let Some(limit) = attr.string_value() + && let Ok(limit) = limit.as_str().parse() + { + crate_data.recursion_limit = Some(limit); } } () if *attr_name == sym::crate_type => { @@ -1188,56 +1188,44 @@ impl<'db> DefCollector<'db> { // Multiple globs may import the same item and they may override visibility from // previously resolved globs. Handle overrides here and leave the rest to // `ItemScope::push_res_with_import()`. - if let Some(def) = defs.types { - if let Some(prev_def) = prev_defs.types { - if def.def == prev_def.def - && self.from_glob_import.contains_type(module_id, name.clone()) - && def.vis != prev_def.vis - && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) - { - changed = true; - // This import is being handled here, don't pass it down to - // `ItemScope::push_res_with_import()`. - defs.types = None; - self.def_map.modules[module_id] - .scope - .update_visibility_types(name, def.vis); - } - } + if let Some(def) = defs.types + && let Some(prev_def) = prev_defs.types + && def.def == prev_def.def + && self.from_glob_import.contains_type(module_id, name.clone()) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) + { + changed = true; + // This import is being handled here, don't pass it down to + // `ItemScope::push_res_with_import()`. + defs.types = None; + self.def_map.modules[module_id].scope.update_visibility_types(name, def.vis); } - if let Some(def) = defs.values { - if let Some(prev_def) = prev_defs.values { - if def.def == prev_def.def - && self.from_glob_import.contains_value(module_id, name.clone()) - && def.vis != prev_def.vis - && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) - { - changed = true; - // See comment above. - defs.values = None; - self.def_map.modules[module_id] - .scope - .update_visibility_values(name, def.vis); - } - } + if let Some(def) = defs.values + && let Some(prev_def) = prev_defs.values + && def.def == prev_def.def + && self.from_glob_import.contains_value(module_id, name.clone()) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) + { + changed = true; + // See comment above. + defs.values = None; + self.def_map.modules[module_id].scope.update_visibility_values(name, def.vis); } - if let Some(def) = defs.macros { - if let Some(prev_def) = prev_defs.macros { - if def.def == prev_def.def - && self.from_glob_import.contains_macro(module_id, name.clone()) - && def.vis != prev_def.vis - && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) - { - changed = true; - // See comment above. - defs.macros = None; - self.def_map.modules[module_id] - .scope - .update_visibility_macros(name, def.vis); - } - } + if let Some(def) = defs.macros + && let Some(prev_def) = prev_defs.macros + && def.def == prev_def.def + && self.from_glob_import.contains_macro(module_id, name.clone()) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) + { + changed = true; + // See comment above. 
+ defs.macros = None; + self.def_map.modules[module_id].scope.update_visibility_macros(name, def.vis); } } @@ -1392,15 +1380,14 @@ impl<'db> DefCollector<'db> { Resolved::Yes }; - if let Some(ident) = path.as_ident() { - if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) { - if helpers.iter().any(|(it, ..)| it == ident) { - cov_mark::hit!(resolved_derive_helper); - // Resolved to derive helper. Collect the item's attributes again, - // starting after the derive helper. - return recollect_without(self); - } - } + if let Some(ident) = path.as_ident() + && let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) + && helpers.iter().any(|(it, ..)| it == ident) + { + cov_mark::hit!(resolved_derive_helper); + // Resolved to derive helper. Collect the item's attributes again, + // starting after the derive helper. + return recollect_without(self); } let def = match resolver_def_id(path) { @@ -1729,12 +1716,12 @@ impl ModCollector<'_, '_> { let mut process_mod_item = |item: ModItemId| { let attrs = self.item_tree.attrs(db, krate, item.ast_id()); - if let Some(cfg) = attrs.cfg() { - if !self.is_cfg_enabled(&cfg) { - let ast_id = item.ast_id().erase(); - self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg); - return; - } + if let Some(cfg) = attrs.cfg() + && !self.is_cfg_enabled(&cfg) + { + let ast_id = item.ast_id().erase(); + self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg); + return; } if let Err(()) = self.resolve_attributes(&attrs, item, container) { @@ -1871,14 +1858,13 @@ impl ModCollector<'_, '_> { if self.def_collector.def_map.block.is_none() && self.def_collector.is_proc_macro && self.module_id == DefMap::ROOT + && let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { - if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { - self.def_collector.export_proc_macro( - proc_macro, - InFile::new(self.file_id(), id), - fn_id, - ); - } + self.def_collector.export_proc_macro( + proc_macro, + InFile::new(self.file_id(), id), + fn_id, + ); } update_def(self.def_collector, fn_id.into(), &it.name, vis, false); @@ -2419,13 +2405,13 @@ impl ModCollector<'_, '_> { macro_id, &self.item_tree[mac.visibility], ); - if let Some(helpers) = helpers_opt { - if self.def_collector.def_map.block.is_none() { - Arc::get_mut(&mut self.def_collector.def_map.data) - .unwrap() - .exported_derives - .insert(macro_id.into(), helpers); - } + if let Some(helpers) = helpers_opt + && self.def_collector.def_map.block.is_none() + { + Arc::get_mut(&mut self.def_collector.def_map.data) + .unwrap() + .exported_derives + .insert(macro_id.into(), helpers); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 316ad5dae69df..a10990e6a8f9f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -228,15 +228,15 @@ impl<'db> Resolver<'db> { ResolvePathResultPrefixInfo::default(), )); } - } else if let &GenericDefId::AdtId(adt) = def { - if *first_name == sym::Self_ { - return Some(( - TypeNs::AdtSelfType(adt), - remaining_idx(), - None, - ResolvePathResultPrefixInfo::default(), - )); - } + } else if let &GenericDefId::AdtId(adt) = def + && *first_name == sym::Self_ + { + return Some(( + TypeNs::AdtSelfType(adt), + remaining_idx(), + None, + ResolvePathResultPrefixInfo::default(), + )); } if let Some(id) = params.find_type_by_name(first_name, *def) { return Some(( @@ -401,13 
+401,13 @@ impl<'db> Resolver<'db> { handle_macro_def_scope(db, &mut hygiene_id, &mut hygiene_info, macro_id) } Scope::GenericParams { params, def } => { - if let &GenericDefId::ImplId(impl_) = def { - if *first_name == sym::Self_ { - return Some(( - ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), - ResolvePathResultPrefixInfo::default(), - )); - } + if let &GenericDefId::ImplId(impl_) = def + && *first_name == sym::Self_ + { + return Some(( + ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), + ResolvePathResultPrefixInfo::default(), + )); } if let Some(id) = params.find_const_by_name(first_name, *def) { let val = ValueNs::GenericParam(id); @@ -436,14 +436,14 @@ impl<'db> Resolver<'db> { ResolvePathResultPrefixInfo::default(), )); } - } else if let &GenericDefId::AdtId(adt) = def { - if *first_name == sym::Self_ { - let ty = TypeNs::AdtSelfType(adt); - return Some(( - ResolveValueResult::Partial(ty, 1, None), - ResolvePathResultPrefixInfo::default(), - )); - } + } else if let &GenericDefId::AdtId(adt) = def + && *first_name == sym::Self_ + { + let ty = TypeNs::AdtSelfType(adt); + return Some(( + ResolveValueResult::Partial(ty, 1, None), + ResolvePathResultPrefixInfo::default(), + )); } if let Some(id) = params.find_type_by_name(first_name, *def) { let ty = TypeNs::GenericParam(id); @@ -469,13 +469,14 @@ impl<'db> Resolver<'db> { // If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back // to resolving to the primitive type, to allow this to still work in the presence of // `use core::u16;`. - if path.kind == PathKind::Plain && n_segments > 1 { - if let Some(builtin) = BuiltinType::by_name(first_name) { - return Some(( - ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None), - ResolvePathResultPrefixInfo::default(), - )); - } + if path.kind == PathKind::Plain + && n_segments > 1 + && let Some(builtin) = BuiltinType::by_name(first_name) + { + return Some(( + ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None), + ResolvePathResultPrefixInfo::default(), + )); } None @@ -660,12 +661,11 @@ impl<'db> Resolver<'db> { Scope::BlockScope(m) => traits.extend(m.def_map[m.module_id].scope.traits()), &Scope::GenericParams { def: GenericDefId::ImplId(impl_), .. } => { let impl_data = db.impl_signature(impl_); - if let Some(target_trait) = impl_data.target_trait { - if let Some(TypeNs::TraitId(trait_)) = self + if let Some(target_trait) = impl_data.target_trait + && let Some(TypeNs::TraitId(trait_)) = self .resolve_path_in_type_ns_fully(db, &impl_data.store[target_trait.path]) - { - traits.insert(trait_); - } + { + traits.insert(trait_); } } _ => (), @@ -918,17 +918,17 @@ fn handle_macro_def_scope( hygiene_info: &mut Option<(SyntaxContext, MacroDefId)>, macro_id: &MacroDefId, ) { - if let Some((parent_ctx, label_macro_id)) = hygiene_info { - if label_macro_id == macro_id { - // A macro is allowed to refer to variables from before its declaration. - // Therefore, if we got to the rib of its declaration, give up its hygiene - // and use its parent expansion. - *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db)); - *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| { - let expansion = db.lookup_intern_macro_call(expansion.into()); - (parent_ctx.parent(db), expansion.def) - }); - } + if let Some((parent_ctx, label_macro_id)) = hygiene_info + && label_macro_id == macro_id + { + // A macro is allowed to refer to variables from before its declaration. 
+ // Therefore, if we got to the rib of its declaration, give up its hygiene + // and use its parent expansion. + *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db)); + *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| { + let expansion = db.lookup_intern_macro_call(expansion.into()); + (parent_ctx.parent(db), expansion.def) + }); } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 4a9af01091f2e..ec34461376165 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -555,12 +555,11 @@ fn concat_expand( // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses // to ensure the right parsing order, so skip the parentheses here. Ideally we'd // implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623 - if let TtElement::Subtree(subtree, subtree_iter) = &t { - if let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens() { - if subtree.delimiter.kind == tt::DelimiterKind::Parenthesis { - t = TtElement::Leaf(tt); - } - } + if let TtElement::Subtree(subtree, subtree_iter) = &t + && let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens() + && subtree.delimiter.kind == tt::DelimiterKind::Parenthesis + { + t = TtElement::Leaf(tt); } match t { TtElement::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => { @@ -891,7 +890,7 @@ fn include_str_expand( }; let text = db.file_text(file_id.file_id(db)); - let text = &*text.text(db); + let text = &**text.text(db); ExpandResult::ok(quote!(call_site =>#text)) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs index c6ea4a3a33db8..d5ebd6ee19f5c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs @@ -334,10 +334,10 @@ where _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))), }, }; - if let Some(NodeOrToken::Token(element)) = iter.peek() { - if element.kind() == syntax::T![,] { - iter.next(); - } + if let Some(NodeOrToken::Token(element)) = iter.peek() + && element.kind() == syntax::T![,] + { + iter.next(); } result } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index 6730b337d356f..a7f3e27a45539 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -99,6 +99,16 @@ impl FileRange { pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper { FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range } } + + #[inline] + pub fn file_text(self, db: &dyn ExpandDatabase) -> &triomphe::Arc { + db.file_text(self.file_id.file_id(db)).text(db) + } + + #[inline] + pub fn text(self, db: &dyn ExpandDatabase) -> &str { + &self.file_text(db)[self.range] + } } /// `AstId` points to an AST node in any file. 
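A minimal usage sketch for the new `FileRange::file_text`/`FileRange::text` helpers added just above (not part of the patch; the `use` paths and the wrapper function are assumptions — only the two helpers, and the `#[returns(ref)]` change on `FileText::text` they rely on, come from this diff):

// Sketch only: module paths and the caller are assumed, not taken from the patch.
use hir_expand::{db::ExpandDatabase, files::FileRange};

fn covered_text(db: &dyn ExpandDatabase, frange: FileRange) -> String {
    // `file_text` borrows the file's text from the database; with the new
    // `#[returns(ref)]` on `FileText::text`, no `Arc<str>` clone is made here.
    let whole: &str = frange.file_text(db);
    // `text` slices that same text down to `frange.range`, so callers no longer
    // fetch and index the file text by hand.
    let snippet = frange.text(db);
    debug_assert!(whole.len() >= snippet.len());
    snippet.to_owned()
}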
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 4a4a3e52aea43..fe77e1565987f 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -280,8 +280,8 @@ pub(crate) fn fixup_syntax( } }, ast::RecordExprField(it) => { - if let Some(colon) = it.colon_token() { - if it.name_ref().is_some() && it.expr().is_none() { + if let Some(colon) = it.colon_token() + && it.name_ref().is_some() && it.expr().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { sym: sym::__ra_fixup, @@ -290,11 +290,10 @@ pub(crate) fn fixup_syntax( }) ]); } - } }, ast::Path(it) => { - if let Some(colon) = it.coloncolon_token() { - if it.segment().is_none() { + if let Some(colon) = it.coloncolon_token() + && it.segment().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { sym: sym::__ra_fixup, @@ -303,7 +302,6 @@ pub(crate) fn fixup_syntax( }) ]); } - } }, ast::ClosureExpr(it) => { if it.body().is_none() { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index ac61b22009706..472ec83ffef5b 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -365,12 +365,11 @@ impl HirFileId { HirFileId::FileId(id) => break id, HirFileId::MacroFile(file) => { let loc = db.lookup_intern_macro_call(file); - if loc.def.is_include() { - if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind { - if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) { - break it; - } - } + if loc.def.is_include() + && let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind + && let Ok(it) = include_input_to_file_id(db, file, &eager.arg) + { + break it; } self = loc.kind.file_id(); } @@ -648,12 +647,11 @@ impl MacroCallLoc { db: &dyn ExpandDatabase, macro_call_id: MacroCallId, ) -> Option { - if self.def.is_include() { - if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind { - if let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg) { - return Some(it); - } - } + if self.def.is_include() + && let MacroCallKind::FnLike { eager: Some(eager), .. 
} = &self.kind + && let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg) + { + return Some(it); } None diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 9f1e3879e1eeb..d84d978cdb7ed 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -273,16 +273,17 @@ fn convert_path( // Basically, even in rustc it is quite hacky: // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 // We follow what it did anyway :) - if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { - if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - let syn_ctx = span_for_range(segment.syntax().text_range()); - if let Some(macro_call_id) = syn_ctx.outer_expn(db) { - if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner { - mod_path.kind = match resolve_crate_root(db, syn_ctx) { - Some(crate_root) => PathKind::DollarCrate(crate_root), - None => PathKind::Crate, - } - } + if mod_path.segments.len() == 1 + && mod_path.kind == PathKind::Plain + && let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) + { + let syn_ctx = span_for_range(segment.syntax().text_range()); + if let Some(macro_call_id) = syn_ctx.outer_expn(db) + && db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner + { + mod_path.kind = match resolve_crate_root(db, syn_ctx) { + Some(crate_root) => PathKind::DollarCrate(crate_root), + None => PathKind::Crate, } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index cc8f7bf04a5cb..26ca7fb9a15ec 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -197,10 +197,11 @@ pub(crate) fn deref_by_trait( // effectively bump the MSRV of rust-analyzer to 1.84 due to 1.83 and below lacking the // blanked impl on `Deref`. #[expect(clippy::overly_complex_bool_expr)] - if use_receiver_trait && false { - if let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) { - return Some(receiver); - } + if use_receiver_trait + && false + && let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) + { + return Some(receiver); } // Old rustc versions might not have `Receiver` trait. // Fallback to `Deref` if they don't diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs index 77d15a73af6ff..8af8fb73f344e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs @@ -309,11 +309,11 @@ impl TyBuilder { if let Some(defaults) = defaults.get(self.vec.len()..) { for default_ty in defaults { // NOTE(skip_binders): we only check if the arg type is error type. - if let Some(x) = default_ty.skip_binders().ty(Interner) { - if x.is_unknown() { - self.vec.push(fallback().cast(Interner)); - continue; - } + if let Some(x) = default_ty.skip_binders().ty(Interner) + && x.is_unknown() + { + self.vec.push(fallback().cast(Interner)); + continue; } // Each default can only depend on the previous parameters. 
self.vec.push(default_ty.clone().substitute(Interner, &*self.vec).cast(Interner)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 26b635298a651..3ba7c93d4fb76 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -83,34 +83,34 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None }) } fn discriminant_type(&self, ty: chalk_ir::Ty) -> chalk_ir::Ty { - if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) { - if let hir_def::AdtId::EnumId(e) = id.0 { - let enum_data = self.db.enum_signature(e); - let ty = enum_data.repr.unwrap_or_default().discr_type(); - return chalk_ir::TyKind::Scalar(match ty { - hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { - true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize), - false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize), - }, - hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed { - true => chalk_ir::Scalar::Int(match size { - hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8, - hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16, - hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32, - hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64, - hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128, - }), - false => chalk_ir::Scalar::Uint(match size { - hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8, - hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16, - hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32, - hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64, - hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128, - }), - }, - }) - .intern(Interner); - } + if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) + && let hir_def::AdtId::EnumId(e) = id.0 + { + let enum_data = self.db.enum_signature(e); + let ty = enum_data.repr.unwrap_or_default().discr_type(); + return chalk_ir::TyKind::Scalar(match ty { + hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { + true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize), + false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize), + }, + hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed { + true => chalk_ir::Scalar::Int(match size { + hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8, + hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16, + hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32, + hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64, + hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128, + }), + false => chalk_ir::Scalar::Uint(match size { + hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8, + hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16, + hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32, + hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64, + hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128, + }), + }, + }) + .intern(Interner); } chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner) } @@ -142,10 +142,10 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { ) -> Option { if let TyKind::BoundVar(bv) = ty.kind(Interner) { let binders = binders.as_slice(Interner); - if bv.debruijn == DebruijnIndex::INNERMOST { - if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind { - return Some(tk); - } + if bv.debruijn == DebruijnIndex::INNERMOST + && let chalk_ir::VariableKind::Ty(tk) = 
binders[bv.index].kind + { + return Some(tk); } } None diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 14b9cd203f60a..f30ec839a0096 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -342,10 +342,10 @@ pub(crate) fn eval_to_const( return c; } } - if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) { - if let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None) { - return result; - } + if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) + && let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None) + { + return result; } unknown_const(infer[expr].clone()) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 40fe3073cf2cd..0815e62f87eef 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -657,10 +657,10 @@ impl<'a> DeclValidator<'a> { } fn is_trait_impl_container(&self, container_id: ItemContainerId) -> bool { - if let ItemContainerId::ImplId(impl_id) = container_id { - if self.db.impl_trait(impl_id).is_some() { - return true; - } + if let ItemContainerId::ImplId(impl_id) = container_id + && self.db.impl_trait(impl_id).is_some() + { + return true; } false } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index cc531f076dd1f..b26bd2b8fa9c4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -528,15 +528,15 @@ impl FilterMapNextChecker { return None; } - if *function_id == self.next_function_id? { - if let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id { - let is_dyn_trait = self - .prev_receiver_ty - .as_ref() - .is_some_and(|it| it.strip_references().dyn_trait().is_some()); - if *receiver_expr_id == prev_filter_map_expr_id && !is_dyn_trait { - return Some(()); - } + if *function_id == self.next_function_id? 
+ && let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id + { + let is_dyn_trait = self + .prev_receiver_ty + .as_ref() + .is_some_and(|it| it.strip_references().dyn_trait().is_some()); + if *receiver_expr_id == prev_filter_map_expr_id && !is_dyn_trait { + return Some(()); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index ca132fbdc454a..e803b56a1ed8f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -382,10 +382,10 @@ impl HirDisplay for Pat { let subpats = (0..num_fields).map(|i| { WriteWith(move |f| { let fid = LocalFieldId::from_raw((i as u32).into()); - if let Some(p) = subpatterns.get(i) { - if p.field == fid { - return p.pattern.hir_fmt(f); - } + if let Some(p) = subpatterns.get(i) + && p.field == fid + { + return p.pattern.hir_fmt(f); } if let Some(p) = subpatterns.iter().find(|p| p.field == fid) { p.pattern.hir_fmt(f) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index f6ad3c7aae2d8..827585e50693a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -272,10 +272,10 @@ impl<'db> UnsafeVisitor<'db> { if let Some(func) = callee.as_fn_def(self.db) { self.check_call(current, func); } - if let TyKind::Function(fn_ptr) = callee.kind(Interner) { - if fn_ptr.sig.safety == chalk_ir::Safety::Unsafe { - self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall); - } + if let TyKind::Function(fn_ptr) = callee.kind(Interner) + && fn_ptr.sig.safety == chalk_ir::Safety::Unsafe + { + self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall); } } Expr::Path(path) => { @@ -346,12 +346,11 @@ impl<'db> UnsafeVisitor<'db> { Expr::Cast { .. } => self.inside_assignment = inside_assignment, Expr::Field { .. } => { self.inside_assignment = inside_assignment; - if !inside_assignment { - if let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) = + if !inside_assignment + && let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) = self.infer.field_resolution(current) - { - self.on_unsafe_op(current.into(), UnsafetyReason::UnionField); - } + { + self.on_unsafe_op(current.into(), UnsafetyReason::UnionField); } } Expr::Unsafe { statements, .. 
} => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index b3760e3a3822a..8f35a3c214551 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -608,48 +608,46 @@ impl HirDisplay for ProjectionTy { // if we are projection on a type parameter, check if the projection target has bounds // itself, if so, we render them directly as `impl Bound` instead of the less useful // `::Assoc` - if !f.display_kind.is_source_code() { - if let TyKind::Placeholder(idx) = self_ty.kind(Interner) { - if !f.bounds_formatting_ctx.contains(self) { - let db = f.db; - let id = from_placeholder_idx(db, *idx); - let generics = generics(db, id.parent); - - let substs = generics.placeholder_subst(db); - let bounds = db - .generic_predicates(id.parent) - .iter() - .map(|pred| pred.clone().substitute(Interner, &substs)) - .filter(|wc| match wc.skip_binders() { - WhereClause::Implemented(tr) => { - matches!( - tr.self_type_parameter(Interner).kind(Interner), - TyKind::Alias(_) - ) - } - WhereClause::TypeOutlives(t) => { - matches!(t.ty.kind(Interner), TyKind::Alias(_)) - } - // We shouldn't be here if these exist - WhereClause::AliasEq(_) => false, - WhereClause::LifetimeOutlives(_) => false, - }) - .collect::>(); - if !bounds.is_empty() { - return f.format_bounds_with(self.clone(), |f| { - write_bounds_like_dyn_trait_with_prefix( - f, - "impl", - Either::Left( - &TyKind::Alias(AliasTy::Projection(self.clone())) - .intern(Interner), - ), - &bounds, - SizedByDefault::NotSized, - ) - }); - } - } + if !f.display_kind.is_source_code() + && let TyKind::Placeholder(idx) = self_ty.kind(Interner) + && !f.bounds_formatting_ctx.contains(self) + { + let db = f.db; + let id = from_placeholder_idx(db, *idx); + let generics = generics(db, id.parent); + + let substs = generics.placeholder_subst(db); + let bounds = db + .generic_predicates(id.parent) + .iter() + .map(|pred| pred.clone().substitute(Interner, &substs)) + .filter(|wc| { + let ty = match wc.skip_binders() { + WhereClause::Implemented(tr) => tr.self_type_parameter(Interner), + WhereClause::TypeOutlives(t) => t.ty.clone(), + // We shouldn't be here if these exist + WhereClause::AliasEq(_) | WhereClause::LifetimeOutlives(_) => { + return false; + } + }; + let TyKind::Alias(AliasTy::Projection(proj)) = ty.kind(Interner) else { + return false; + }; + proj == self + }) + .collect::>(); + if !bounds.is_empty() { + return f.format_bounds_with(self.clone(), |f| { + write_bounds_like_dyn_trait_with_prefix( + f, + "impl", + Either::Left( + &TyKind::Alias(AliasTy::Projection(self.clone())).intern(Interner), + ), + &bounds, + SizedByDefault::NotSized, + ) + }); } } @@ -1860,18 +1858,13 @@ fn write_bounds_like_dyn_trait( write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; f.end_location_link(); if is_fn_trait { - if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) { - if let Some(args) = + if let [self_, params @ ..] 
= trait_ref.substitution.as_slice(Interner) + && let Some(args) = params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple()) - { - write!(f, "(")?; - hir_fmt_generic_arguments( - f, - args.as_slice(Interner), - self_.ty(Interner), - )?; - write!(f, ")")?; - } + { + write!(f, "(")?; + hir_fmt_generic_arguments(f, args.as_slice(Interner), self_.ty(Interner))?; + write!(f, ")")?; } } else { let params = generic_args_sans_defaults( @@ -1879,13 +1872,13 @@ fn write_bounds_like_dyn_trait( Some(trait_.into()), trait_ref.substitution.as_slice(Interner), ); - if let [self_, params @ ..] = params { - if !params.is_empty() { - write!(f, "<")?; - hir_fmt_generic_arguments(f, params, self_.ty(Interner))?; - // there might be assoc type bindings, so we leave the angle brackets open - angle_open = true; - } + if let [self_, params @ ..] = params + && !params.is_empty() + { + write!(f, "<")?; + hir_fmt_generic_arguments(f, params, self_.ty(Interner))?; + // there might be assoc type bindings, so we leave the angle brackets open + angle_open = true; } } } @@ -2443,11 +2436,11 @@ impl HirDisplayWithExpressionStore for Path { generic_args.args[0].hir_fmt(f, store)?; } } - if let Some(ret) = generic_args.bindings[0].type_ref { - if !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty()) { - write!(f, " -> ")?; - ret.hir_fmt(f, store)?; - } + if let Some(ret) = generic_args.bindings[0].type_ref + && !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty()) + { + write!(f, " -> ")?; + ret.hir_fmt(f, store)?; } } hir_def::expr_store::path::GenericArgsParentheses::No => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index 30949c83bfae1..6294d683e6c02 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -136,16 +136,15 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b let predicates = predicates.iter().map(|p| p.skip_binders().skip_binders().clone()); elaborate_clause_supertraits(db, predicates).any(|pred| match pred { WhereClause::Implemented(trait_ref) => { - if from_chalk_trait_id(trait_ref.trait_id) == sized { - if let TyKind::BoundVar(it) = + if from_chalk_trait_id(trait_ref.trait_id) == sized + && let TyKind::BoundVar(it) = *trait_ref.self_type_parameter(Interner).kind(Interner) - { - // Since `generic_predicates` is `Binder>`, the `DebrujinIndex` of - // self-parameter is `1` - return it - .index_if_bound_at(DebruijnIndex::ONE) - .is_some_and(|idx| idx == trait_self_param_idx); - } + { + // Since `generic_predicates` is `Binder>`, the `DebrujinIndex` of + // self-parameter is `1` + return it + .index_if_bound_at(DebruijnIndex::ONE) + .is_some_and(|idx| idx == trait_self_param_idx); } false } @@ -401,10 +400,10 @@ where cb(MethodViolationCode::ReferencesSelfOutput)?; } - if !func_data.is_async() { - if let Some(mvc) = contains_illegal_impl_trait_in_trait(db, &sig) { - cb(mvc)?; - } + if !func_data.is_async() + && let Some(mvc) = contains_illegal_impl_trait_in_trait(db, &sig) + { + cb(mvc)?; } let generic_params = db.generic_params(func.into()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 7c39afa0ef896..86345b23364d3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -902,12 +902,12 @@ impl<'db> InferenceContext<'db> { return 
false; } - if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic { - if let Some(ty) = field_with_same_name { - *ty = table.resolve_completely(ty.clone()); - if ty.contains_unknown() { - *field_with_same_name = None; - } + if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic + && let Some(ty) = field_with_same_name + { + *ty = table.resolve_completely(ty.clone()); + if ty.contains_unknown() { + *field_with_same_name = None; } } } @@ -1010,12 +1010,12 @@ impl<'db> InferenceContext<'db> { param_tys.push(va_list_ty); } let mut param_tys = param_tys.into_iter().chain(iter::repeat(self.table.new_type_var())); - if let Some(self_param) = self.body.self_param { - if let Some(ty) = param_tys.next() { - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); - self.write_binding_ty(self_param, ty); - } + if let Some(self_param) = self.body.self_param + && let Some(ty) = param_tys.next() + { + let ty = self.insert_type_vars(ty); + let ty = self.normalize_associated_types_in(ty); + self.write_binding_ty(self_param, ty); } let mut tait_candidates = FxHashSet::default(); for (ty, pat) in param_tys.zip(&*self.body.params) { @@ -1199,20 +1199,19 @@ impl<'db> InferenceContext<'db> { ) -> std::ops::ControlFlow { let ty = self.table.resolve_ty_shallow(ty); - if let TyKind::OpaqueType(id, _) = ty.kind(Interner) { - if let ImplTraitId::TypeAliasImplTrait(alias_id, _) = + if let TyKind::OpaqueType(id, _) = ty.kind(Interner) + && let ImplTraitId::TypeAliasImplTrait(alias_id, _) = self.db.lookup_intern_impl_trait_id((*id).into()) - { - let loc = self.db.lookup_intern_type_alias(alias_id); - match loc.container { - ItemContainerId::ImplId(impl_id) => { - self.assocs.insert(*id, (impl_id, ty.clone())); - } - ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) => { - self.non_assocs.insert(*id, ty.clone()); - } - _ => {} + { + let loc = self.db.lookup_intern_type_alias(alias_id); + match loc.container { + ItemContainerId::ImplId(impl_id) => { + self.assocs.insert(*id, (impl_id, ty.clone())); + } + ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) => { + self.non_assocs.insert(*id, ty.clone()); } + _ => {} } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs index 4e95eca3f9402..f0a4167f8e250 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs @@ -233,26 +233,25 @@ impl CastCheck { F: FnMut(ExprId, Vec), { // Mutability order is opposite to rustc. `Mut < Not` - if m_expr <= m_cast { - if let TyKind::Array(ety, _) = t_expr.kind(Interner) { - // Coerce to a raw pointer so that we generate RawPtr in MIR. - let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner); - if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) - { - apply_adjustments(self.source_expr, adj); - } else { - never!( - "could not cast from reference to array to pointer to array ({:?} to {:?})", - self.expr_ty, - array_ptr_type - ); - } + if m_expr <= m_cast + && let TyKind::Array(ety, _) = t_expr.kind(Interner) + { + // Coerce to a raw pointer so that we generate RawPtr in MIR. 
+ let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner); + if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) { + apply_adjustments(self.source_expr, adj); + } else { + never!( + "could not cast from reference to array to pointer to array ({:?} to {:?})", + self.expr_ty, + array_ptr_type + ); + } - // This is a less strict condition than rustc's `demand_eqtype`, - // but false negative is better than false positive - if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() { - return Ok(()); - } + // This is a less strict condition than rustc's `demand_eqtype`, + // but false negative is better than false positive + if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() { + return Ok(()); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index c3029bf2b59ad..8024c1a9a4e92 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -176,12 +176,12 @@ impl InferenceContext<'_> { } // Deduction based on the expected `dyn Fn` is done separately. - if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) { - if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) { - let expected_sig_ty = TyKind::Function(sig).intern(Interner); + if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) + && let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) + { + let expected_sig_ty = TyKind::Function(sig).intern(Interner); - self.unify(sig_ty, &expected_sig_ty); - } + self.unify(sig_ty, &expected_sig_ty); } } @@ -208,14 +208,13 @@ impl InferenceContext<'_> { alias: AliasTy::Projection(projection_ty), ty: projected_ty, }) = bound.skip_binders() - { - if let Some(sig) = self.deduce_sig_from_projection( + && let Some(sig) = self.deduce_sig_from_projection( closure_kind, projection_ty, projected_ty, - ) { - return Some(sig); - } + ) + { + return Some(sig); } None }); @@ -254,55 +253,44 @@ impl InferenceContext<'_> { let mut expected_kind = None; for clause in elaborate_clause_supertraits(self.db, clauses.rev()) { - if expected_sig.is_none() { - if let WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(projection), - ty, - }) = &clause - { - let inferred_sig = - self.deduce_sig_from_projection(closure_kind, projection, ty); - // Make sure that we didn't infer a signature that mentions itself. - // This can happen when we elaborate certain supertrait bounds that - // mention projections containing the `Self` type. See rust-lang/rust#105401. - struct MentionsTy<'a> { - expected_ty: &'a Ty, - } - impl TypeVisitor for MentionsTy<'_> { - type BreakTy = (); + if expected_sig.is_none() + && let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) = + &clause + { + let inferred_sig = self.deduce_sig_from_projection(closure_kind, projection, ty); + // Make sure that we didn't infer a signature that mentions itself. + // This can happen when we elaborate certain supertrait bounds that + // mention projections containing the `Self` type. See rust-lang/rust#105401. 
+ struct MentionsTy<'a> { + expected_ty: &'a Ty, + } + impl TypeVisitor for MentionsTy<'_> { + type BreakTy = (); - fn interner(&self) -> Interner { - Interner - } + fn interner(&self) -> Interner { + Interner + } - fn as_dyn( - &mut self, - ) -> &mut dyn TypeVisitor - { - self - } + fn as_dyn( + &mut self, + ) -> &mut dyn TypeVisitor + { + self + } - fn visit_ty( - &mut self, - t: &Ty, - db: chalk_ir::DebruijnIndex, - ) -> ControlFlow<()> { - if t == self.expected_ty { - ControlFlow::Break(()) - } else { - t.super_visit_with(self, db) - } + fn visit_ty(&mut self, t: &Ty, db: chalk_ir::DebruijnIndex) -> ControlFlow<()> { + if t == self.expected_ty { + ControlFlow::Break(()) + } else { + t.super_visit_with(self, db) } } - if inferred_sig - .visit_with( - &mut MentionsTy { expected_ty }, - chalk_ir::DebruijnIndex::INNERMOST, - ) - .is_continue() - { - expected_sig = inferred_sig; - } + } + if inferred_sig + .visit_with(&mut MentionsTy { expected_ty }, chalk_ir::DebruijnIndex::INNERMOST) + .is_continue() + { + expected_sig = inferred_sig; } } @@ -617,11 +605,10 @@ impl HirPlace { if let CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow, }) = current_capture + && self.projections[len..].contains(&ProjectionElem::Deref) { - if self.projections[len..].contains(&ProjectionElem::Deref) { - current_capture = - CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }); - } + current_capture = + CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }); } current_capture } @@ -1076,12 +1063,11 @@ impl InferenceContext<'_> { Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), }; - if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) { - if let Some(place) = + if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) + && let Some(place) = apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest) - { - self.add_capture(place, capture_kind); - } + { + self.add_capture(place, capture_kind); } self.walk_expr_with_adjust(tgt_expr, rest); } @@ -1169,15 +1155,15 @@ impl InferenceContext<'_> { } } self.walk_expr(*expr); - if let Some(discr_place) = self.place_of_expr(*expr) { - if self.is_upvar(&discr_place) { - let mut capture_mode = None; - for arm in arms.iter() { - self.walk_pat(&mut capture_mode, arm.pat); - } - if let Some(c) = capture_mode { - self.push_capture(discr_place, c); - } + if let Some(discr_place) = self.place_of_expr(*expr) + && self.is_upvar(&discr_place) + { + let mut capture_mode = None; + for arm in arms.iter() { + self.walk_pat(&mut capture_mode, arm.pat); + } + if let Some(c) = capture_mode { + self.push_capture(discr_place, c); } } } @@ -1209,13 +1195,11 @@ impl InferenceContext<'_> { let mutability = 'b: { if let Some(deref_trait) = self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) - { - if let Some(deref_fn) = deref_trait + && let Some(deref_fn) = deref_trait .trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - break 'b deref_fn == f; - } + { + break 'b deref_fn == f; } false }; @@ -1405,10 +1389,10 @@ impl InferenceContext<'_> { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { let mut ty = None; - if let Some(it) = self.result.expr_adjustments.get(&e) { - if let Some(it) = it.last() { - ty = Some(it.target.clone()); - } + if let Some(it) = self.result.expr_adjustments.get(&e) + && let Some(it) = it.last() + { + 
ty = Some(it.target.clone()); } ty.unwrap_or_else(|| self.expr_ty(e)) } @@ -1793,10 +1777,10 @@ impl InferenceContext<'_> { } pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) { - if let Some(c) = self.current_closure { - if !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) { - self.closure_dependencies.entry(c).or_default().push(dep); - } + if let Some(c) = self.current_closure + && !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) + { + self.closure_dependencies.entry(c).or_default().push(dep); } fn dep_creates_cycle( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 39bd90849fe8f..761a2564aa799 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -164,14 +164,14 @@ impl CoerceMany { // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335) // First try to coerce the new expression to the type of the previous ones, // but only if the new expression has no coercion already applied to it. - if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) { - if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) { - self.final_ty = Some(res); - if let Some(expr) = expr { - self.expressions.push(expr); - } - return; + if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) + && let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) + { + self.final_ty = Some(res); + if let Some(expr) = expr { + self.expressions.push(expr); } + return; } if let Ok((adjustments, res)) = @@ -322,18 +322,13 @@ impl InferenceTable<'_> { // If we are coercing into a TAIT, coerce into its proxy inference var, instead. let mut to_ty = to_ty; let _to; - if let Some(tait_table) = &self.tait_coercion_table { - if let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) { - if !matches!( - from_ty.kind(Interner), - TyKind::InferenceVar(..) | TyKind::OpaqueType(..) - ) { - if let Some(ty) = tait_table.get(opaque_ty_id) { - _to = ty.clone(); - to_ty = &_to; - } - } - } + if let Some(tait_table) = &self.tait_coercion_table + && let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) + && !matches!(from_ty.kind(Interner), TyKind::InferenceVar(..) 
| TyKind::OpaqueType(..)) + && let Some(ty) = tait_table.get(opaque_ty_id) + { + _to = ty.clone(); + to_ty = &_to; } // Consider coercing the subtype to a DST @@ -594,14 +589,13 @@ impl InferenceTable<'_> { F: FnOnce(Ty) -> Vec, G: FnOnce(Ty) -> Vec, { - if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) { - if let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) = + if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) + && let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) = (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety) - { - let from_unsafe = - TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner); - return self.unify_and(&from_unsafe, to_ty, to_unsafe); - } + { + let from_unsafe = + TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner); + return self.unify_and(&from_unsafe, to_ty, to_unsafe); } self.unify_and(&from_ty, to_ty, normal) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index d43c99fc28271..16fc2bfc0631f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -653,19 +653,18 @@ impl InferenceContext<'_> { // FIXME: Note down method resolution her match op { UnaryOp::Deref => { - if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) { - if let Some(deref_fn) = deref_trait + if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) + && let Some(deref_fn) = deref_trait .trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::deref)) - { - // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that - // the mutability is not wrong, and will be fixed in `self.infer_mut`). - self.write_method_resolution( - tgt_expr, - deref_fn, - Substitution::empty(Interner), - ); - } + { + // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that + // the mutability is not wrong, and will be fixed in `self.infer_mut`). + self.write_method_resolution( + tgt_expr, + deref_fn, + Substitution::empty(Interner), + ); } if let Some(derefed) = builtin_deref(self.table.db, &inner_ty, true) { self.resolve_ty_shallow(derefed) @@ -1387,28 +1386,28 @@ impl InferenceContext<'_> { let ret_ty = match method_ty.callable_sig(self.db) { Some(sig) => { let p_left = &sig.params()[0]; - if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) { - if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) { - self.write_expr_adj( - lhs, - Box::new([Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), - target: p_left.clone(), - }]), - ); - } + if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. 
}) + && let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) + { + self.write_expr_adj( + lhs, + Box::new([Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), + target: p_left.clone(), + }]), + ); } let p_right = &sig.params()[1]; - if matches!(op, BinaryOp::CmpOp(..)) { - if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) { - self.write_expr_adj( - rhs, - Box::new([Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), - target: p_right.clone(), - }]), - ); - } + if matches!(op, BinaryOp::CmpOp(..)) + && let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) + { + self.write_expr_adj( + rhs, + Box::new([Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), + target: p_right.clone(), + }]), + ); } sig.ret().clone() } @@ -1664,14 +1663,12 @@ impl InferenceContext<'_> { Some((ty, field_id, adjustments, is_public)) => { self.write_expr_adj(receiver, adjustments.into_boxed_slice()); self.result.field_resolutions.insert(tgt_expr, field_id); - if !is_public { - if let Either::Left(field) = field_id { - // FIXME: Merge this diagnostic into UnresolvedField? - self.push_diagnostic(InferenceDiagnostic::PrivateField { - expr: tgt_expr, - field, - }); - } + if !is_public && let Either::Left(field) = field_id { + // FIXME: Merge this diagnostic into UnresolvedField? + self.push_diagnostic(InferenceDiagnostic::PrivateField { + expr: tgt_expr, + field, + }); } ty } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 3f7eba9dd18c3..c798e9e050a18 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -124,53 +124,41 @@ impl InferenceContext<'_> { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } &Expr::Index { base, index } => { - if mutability == Mutability::Mut { - if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { - if let Some(index_trait) = - LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate) - { - if let Some(index_fn) = index_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::index_mut)) - { - *f = index_fn; - let mut base_ty = None; - let base_adjustments = self - .result - .expr_adjustments - .get_mut(&base) - .and_then(|it| it.last_mut()); - if let Some(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)), - target, - }) = base_adjustments - { - if let TyKind::Ref(_, _, ty) = target.kind(Interner) { - base_ty = Some(ty.clone()); - } - *mutability = Mutability::Mut; - } - - // Apply `IndexMut` obligation for non-assignee expr - if let Some(base_ty) = base_ty { - let index_ty = - if let Some(ty) = self.result.type_of_expr.get(index) { - ty.clone() - } else { - self.infer_expr( - index, - &Expectation::none(), - ExprIsRead::Yes, - ) - }; - let trait_ref = TyBuilder::trait_ref(self.db, index_trait) - .push(base_ty) - .fill(|_| index_ty.clone().cast(Interner)) - .build(); - self.push_obligation(trait_ref.cast(Interner)); - } - } + if mutability == Mutability::Mut + && let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) + && let Some(index_trait) = + LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate) + && let Some(index_fn) = index_trait + .trait_items(self.db) + .method_by_name(&Name::new_symbol_root(sym::index_mut)) + { + *f = index_fn; + let mut base_ty = None; + let base_adjustments = + 
self.result.expr_adjustments.get_mut(&base).and_then(|it| it.last_mut()); + if let Some(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)), + target, + }) = base_adjustments + { + if let TyKind::Ref(_, _, ty) = target.kind(Interner) { + base_ty = Some(ty.clone()); } + *mutability = Mutability::Mut; + } + + // Apply `IndexMut` obligation for non-assignee expr + if let Some(base_ty) = base_ty { + let index_ty = if let Some(ty) = self.result.type_of_expr.get(index) { + ty.clone() + } else { + self.infer_expr(index, &Expectation::none(), ExprIsRead::Yes) + }; + let trait_ref = TyBuilder::trait_ref(self.db, index_trait) + .push(base_ty) + .fill(|_| index_ty.clone().cast(Interner)) + .build(); + self.push_obligation(trait_ref.cast(Interner)); } } self.infer_mut_expr(base, mutability); @@ -178,28 +166,23 @@ impl InferenceContext<'_> { } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { let mut mutability = mutability; - if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { - if mutability == Mutability::Mut { - if let Some(deref_trait) = - LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate) - { - let ty = self.result.type_of_expr.get(*expr); - let is_mut_ptr = ty.is_some_and(|ty| { - let ty = self.table.resolve_ty_shallow(ty); - matches!( - ty.kind(Interner), - chalk_ir::TyKind::Raw(Mutability::Mut, _) - ) - }); - if is_mut_ptr { - mutability = Mutability::Not; - } else if let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - *f = deref_fn; - } - } + if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) + && mutability == Mutability::Mut + && let Some(deref_trait) = + LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate) + { + let ty = self.result.type_of_expr.get(*expr); + let is_mut_ptr = ty.is_some_and(|ty| { + let ty = self.table.resolve_ty_shallow(ty); + matches!(ty.kind(Interner), chalk_ir::TyKind::Raw(Mutability::Mut, _)) + }); + if is_mut_ptr { + mutability = Mutability::Not; + } else if let Some(deref_fn) = deref_trait + .trait_items(self.db) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) + { + *f = deref_fn; } } self.infer_mut_expr(*expr, mutability); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 18288b718f76d..707bec0fce4ce 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -498,12 +498,12 @@ impl InferenceContext<'_> { // If `expected` is an infer ty, we try to equate it to an array if the given pattern // allows it. See issue #16609 - if self.pat_is_irrefutable(decl) && expected.is_ty_var() { - if let Some(resolved_array_ty) = + if self.pat_is_irrefutable(decl) + && expected.is_ty_var() + && let Some(resolved_array_ty) = self.try_resolve_slice_ty_to_array_ty(prefix, suffix, slice) - { - self.unify(&expected, &resolved_array_ty); - } + { + self.unify(&expected, &resolved_array_ty); } let expected = self.resolve_ty_shallow(&expected); @@ -539,17 +539,16 @@ impl InferenceContext<'_> { fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty { // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`. 
- if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] { - if let Some((inner, ..)) = expected.as_reference() { - let inner = self.resolve_ty_shallow(inner); - if matches!(inner.kind(Interner), TyKind::Slice(_)) { - let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner); - let slice_ty = TyKind::Slice(elem_ty).intern(Interner); - let ty = - TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner); - self.write_expr_ty(expr, ty.clone()); - return ty; - } + if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] + && let Some((inner, ..)) = expected.as_reference() + { + let inner = self.resolve_ty_shallow(inner); + if matches!(inner.kind(Interner), TyKind::Slice(_)) { + let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner); + let slice_ty = TyKind::Slice(elem_ty).intern(Interner); + let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner); + self.write_expr_ty(expr, ty.clone()); + return ty; } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index d61e7de6672f1..afee9606bd5f8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -830,10 +830,10 @@ fn named_associated_type_shorthand_candidates( let data = t.hir_trait_id().trait_items(db); for (name, assoc_id) in &data.items { - if let AssocItemId::TypeAliasId(alias) = assoc_id { - if let Some(result) = cb(name, &t, *alias) { - return Some(result); - } + if let AssocItemId::TypeAliasId(alias) = assoc_id + && let Some(result) = cb(name, &t, *alias) + { + return Some(result); } } None diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index 5c06234fa077f..9519c38eeddfd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -360,15 +360,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } } - if let Some(enum_segment) = enum_segment { - if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }); - } + if let Some(enum_segment) = enum_segment + && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); } self.handle_type_ns_resolution(&resolution); @@ -417,15 +416,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } } - if let Some(enum_segment) = enum_segment { - if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }); - } + if let Some(enum_segment) = enum_segment + && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + 
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); } match &res { @@ -576,13 +574,12 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // This simplifies the code a bit. let penultimate_idx = self.current_segment_idx.wrapping_sub(1); let penultimate = self.segments.get(penultimate_idx); - if let Some(penultimate) = penultimate { - if self.current_or_prev_segment.args_and_bindings.is_none() - && penultimate.args_and_bindings.is_some() - { - self.current_segment_idx = penultimate_idx; - self.current_or_prev_segment = penultimate; - } + if let Some(penultimate) = penultimate + && self.current_or_prev_segment.args_and_bindings.is_none() + && penultimate.args_and_bindings.is_some() + { + self.current_segment_idx = penultimate_idx; + self.current_or_prev_segment = penultimate; } var.lookup(self.ctx.db).parent.into() } @@ -607,37 +604,36 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { ) -> Substitution { let mut lifetime_elision = self.ctx.lifetime_elision.clone(); - if let Some(args) = self.current_or_prev_segment.args_and_bindings { - if args.parenthesized != GenericArgsParentheses::No { - let prohibit_parens = match def { - GenericDefId::TraitId(trait_) => { - // RTN is prohibited anyways if we got here. - let is_rtn = - args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; - let is_fn_trait = self - .ctx - .db - .trait_signature(trait_) - .flags - .contains(TraitFlags::RUSTC_PAREN_SUGAR); - is_rtn || !is_fn_trait - } - _ => true, - }; - - if prohibit_parens { - let segment = self.current_segment_u32(); - self.on_diagnostic( - PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, - ); - - return TyBuilder::unknown_subst(self.ctx.db, def); + if let Some(args) = self.current_or_prev_segment.args_and_bindings + && args.parenthesized != GenericArgsParentheses::No + { + let prohibit_parens = match def { + GenericDefId::TraitId(trait_) => { + // RTN is prohibited anyways if we got here. + let is_rtn = args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; + let is_fn_trait = self + .ctx + .db + .trait_signature(trait_) + .flags + .contains(TraitFlags::RUSTC_PAREN_SUGAR); + is_rtn || !is_fn_trait } + _ => true, + }; - // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. - lifetime_elision = - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; + if prohibit_parens { + let segment = self.current_segment_u32(); + self.on_diagnostic( + PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, + ); + + return TyBuilder::unknown_subst(self.ctx.db, def); } + + // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. 
+ lifetime_elision = + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; } self.substs_from_args_and_bindings( @@ -753,18 +749,20 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { match param { GenericParamDataRef::LifetimeParamData(_) => error_lifetime().cast(Interner), GenericParamDataRef::TypeParamData(param) => { - if !infer_args && param.default.is_some() { - if let Some(default) = default() { - return default; - } + if !infer_args + && param.default.is_some() + && let Some(default) = default() + { + return default; } TyKind::Error.intern(Interner).cast(Interner) } GenericParamDataRef::ConstParamData(param) => { - if !infer_args && param.default.is_some() { - if let Some(default) = default() { - return default; - } + if !infer_args + && param.default.is_some() + && let Some(default) = default() + { + return default; } let GenericParamId::ConstParamId(const_id) = param_id else { unreachable!("non-const param ID for const param"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index a6150a9bc1728..b22781e947013 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -581,15 +581,15 @@ impl ReceiverAdjustments { } if self.unsize_array { ty = 'it: { - if let TyKind::Ref(m, l, inner) = ty.kind(Interner) { - if let TyKind::Array(inner, _) = inner.kind(Interner) { - break 'it TyKind::Ref( - *m, - l.clone(), - TyKind::Slice(inner.clone()).intern(Interner), - ) - .intern(Interner); - } + if let TyKind::Ref(m, l, inner) = ty.kind(Interner) + && let TyKind::Array(inner, _) = inner.kind(Interner) + { + break 'it TyKind::Ref( + *m, + l.clone(), + TyKind::Slice(inner.clone()).intern(Interner), + ) + .intern(Interner); } // FIXME: report diagnostic if array unsizing happens without indirection. 
ty @@ -1549,11 +1549,11 @@ fn is_valid_impl_method_candidate( check_that!(receiver_ty.is_none()); check_that!(name.is_none_or(|n| n == item_name)); - if let Some(from_module) = visible_from_module { - if !db.assoc_visibility(c.into()).is_visible_from(db, from_module) { - cov_mark::hit!(const_candidate_not_visible); - return IsValidCandidate::NotVisible; - } + if let Some(from_module) = visible_from_module + && !db.assoc_visibility(c.into()).is_visible_from(db, from_module) + { + cov_mark::hit!(const_candidate_not_visible); + return IsValidCandidate::NotVisible; } let self_ty_matches = table.run_in_snapshot(|table| { let expected_self_ty = @@ -1638,11 +1638,11 @@ fn is_valid_impl_fn_candidate( let db = table.db; let data = db.function_signature(fn_id); - if let Some(from_module) = visible_from_module { - if !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module) { - cov_mark::hit!(autoderef_candidate_not_visible); - return IsValidCandidate::NotVisible; - } + if let Some(from_module) = visible_from_module + && !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module) + { + cov_mark::hit!(autoderef_candidate_not_visible); + return IsValidCandidate::NotVisible; } table.run_in_snapshot(|table| { let _p = tracing::info_span!("subst_for_def").entered(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index fb0c0dee095f1..52df851c30d13 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -559,10 +559,9 @@ fn mutability_of_locals( }, p, ) = value + && place_case(db, body, p) != ProjectionCase::Indirect { - if place_case(db, body, p) != ProjectionCase::Indirect { - push_mut_span(p.local, statement.span, &mut result); - } + push_mut_span(p.local, statement.span, &mut result); } } StatementKind::FakeRead(p) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 9a97bd6dbe293..dfb8ae704b996 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -1082,18 +1082,18 @@ impl Evaluator<'_> { let stack_size = { let mut stack_ptr = self.stack.len(); for (id, it) in body.locals.iter() { - if id == return_slot() { - if let Some(destination) = destination { - locals.ptr.insert(id, destination); - continue; - } + if id == return_slot() + && let Some(destination) = destination + { + locals.ptr.insert(id, destination); + continue; } let (size, align) = self.size_align_of_sized( &it.ty, &locals, "no unsized local in extending stack", )?; - while stack_ptr % align != 0 { + while !stack_ptr.is_multiple_of(align) { stack_ptr += 1; } let my_ptr = stack_ptr; @@ -1673,14 +1673,14 @@ impl Evaluator<'_> { if let Some(it) = goal(kind) { return Ok(it); } - if let TyKind::Adt(id, subst) = kind { - if let AdtId::StructId(struct_id) = id.0 { - let field_types = self.db.field_types(struct_id.into()); - if let Some(ty) = - field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst)) - { - return self.coerce_unsized_look_through_fields(&ty, goal); - } + if let TyKind::Adt(id, subst) = kind + && let AdtId::StructId(struct_id) = id.0 + { + let field_types = self.db.field_types(struct_id.into()); + if let Some(ty) = + field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst)) + { + return self.coerce_unsized_look_through_fields(&ty, goal); } } 
Err(MirEvalError::CoerceUnsizedError(ty.clone())) @@ -1778,17 +1778,15 @@ impl Evaluator<'_> { locals: &Locals, ) -> Result<(usize, Arc, Option<(usize, usize, i128)>)> { let adt = it.adt_id(self.db); - if let DefWithBodyId::VariantId(f) = locals.body.owner { - if let VariantId::EnumVariantId(it) = it { - if let AdtId::EnumId(e) = adt { - if f.lookup(self.db).parent == e { - // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and - // infinite sized type errors) we use a dummy layout - let i = self.const_eval_discriminant(it)?; - return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); - } - } - } + if let DefWithBodyId::VariantId(f) = locals.body.owner + && let VariantId::EnumVariantId(it) = it + && let AdtId::EnumId(e) = adt + && f.lookup(self.db).parent == e + { + // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and + // infinite sized type errors) we use a dummy layout + let i = self.const_eval_discriminant(it)?; + return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); } let layout = self.layout_adt(adt, subst)?; Ok(match &layout.variants { @@ -1909,10 +1907,10 @@ impl Evaluator<'_> { let name = const_id.name(self.db); MirEvalError::ConstEvalError(name, Box::new(e)) })?; - if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value { - if let ConstScalar::Bytes(v, mm) = &c.interned { - break 'b (v, mm); - } + if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value + && let ConstScalar::Bytes(v, mm) = &c.interned + { + break 'b (v, mm); } not_supported!("unevaluatable constant"); } @@ -2055,14 +2053,13 @@ impl Evaluator<'_> { .is_sized() .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize))); } - if let DefWithBodyId::VariantId(f) = locals.body.owner { - if let Some((AdtId::EnumId(e), _)) = ty.as_adt() { - if f.lookup(self.db).parent == e { - // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and - // infinite sized type errors) we use a dummy size - return Ok(Some((16, 16))); - } - } + if let DefWithBodyId::VariantId(f) = locals.body.owner + && let Some((AdtId::EnumId(e), _)) = ty.as_adt() + && f.lookup(self.db).parent == e + { + // Computing the exact size of enums require resolving the enum discriminants. 
In order to prevent loops (and + // infinite sized type errors) we use a dummy size + return Ok(Some((16, 16))); } let layout = self.layout(ty); if self.assert_placeholder_ty_is_unused @@ -2103,7 +2100,7 @@ impl Evaluator<'_> { if !align.is_power_of_two() || align > 10000 { return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid"))); } - while self.heap.len() % align != 0 { + while !self.heap.len().is_multiple_of(align) { self.heap.push(0); } if size.checked_add(self.heap.len()).is_none_or(|x| x > self.memory_limit) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index e9665d5ae9cf1..bb4c963a8ae15 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -119,25 +119,25 @@ impl Evaluator<'_> { destination.write_from_bytes(self, &result)?; return Ok(true); } - if let ItemContainerId::TraitId(t) = def.lookup(self.db).container { - if self.db.lang_attr(t.into()) == Some(LangItem::Clone) { - let [self_ty] = generic_args.as_slice(Interner) else { - not_supported!("wrong generic arg count for clone"); - }; - let Some(self_ty) = self_ty.ty(Interner) else { - not_supported!("wrong generic arg kind for clone"); - }; - // Clone has special impls for tuples and function pointers - if matches!( - self_ty.kind(Interner), - TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..) - ) { - self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?; - return Ok(true); - } - // Return early to prevent caching clone as non special fn. - return Ok(false); + if let ItemContainerId::TraitId(t) = def.lookup(self.db).container + && self.db.lang_attr(t.into()) == Some(LangItem::Clone) + { + let [self_ty] = generic_args.as_slice(Interner) else { + not_supported!("wrong generic arg count for clone"); + }; + let Some(self_ty) = self_ty.ty(Interner) else { + not_supported!("wrong generic arg kind for clone"); + }; + // Clone has special impls for tuples and function pointers + if matches!( + self_ty.kind(Interner), + TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..) + ) { + self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?; + return Ok(true); } + // Return early to prevent caching clone as non special fn. 
+ return Ok(false); } self.not_special_fn_cache.borrow_mut().insert(def); Ok(false) @@ -1256,23 +1256,22 @@ impl Evaluator<'_> { let addr = tuple.interval.addr.offset(offset); args.push(IntervalAndTy::new(addr, field, self, locals)?); } - if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) { - if let Some(def) = target + if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) + && let Some(def) = target .trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::call_once)) - { - self.exec_fn_trait( - def, - &args, - // FIXME: wrong for manual impls of `FnOnce` - Substitution::empty(Interner), - locals, - destination, - None, - span, - )?; - return Ok(true); - } + { + self.exec_fn_trait( + def, + &args, + // FIXME: wrong for manual impls of `FnOnce` + Substitution::empty(Interner), + locals, + destination, + None, + span, + )?; + return Ok(true); } not_supported!("FnOnce was not available for executing const_eval_select"); } @@ -1367,12 +1366,11 @@ impl Evaluator<'_> { break; } } - if signed { - if let Some((&l, &r)) = lhs.iter().zip(rhs).next_back() { - if l != r { - result = (l as i8).cmp(&(r as i8)); - } - } + if signed + && let Some((&l, &r)) = lhs.iter().zip(rhs).next_back() + && l != r + { + result = (l as i8).cmp(&(r as i8)); } if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) { let ty = self.db.ty(e.into()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs index bc331a23d98e3..f554772904537 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -114,12 +114,11 @@ impl Evaluator<'_> { break; } } - if is_signed { - if let Some((&l, &r)) = l.iter().zip(r).next_back() { - if l != r { - result = (l as i8).cmp(&(r as i8)); - } - } + if is_signed + && let Some((&l, &r)) = l.iter().zip(r).next_back() + && l != r + { + result = (l as i8).cmp(&(r as i8)); } let result = match result { Ordering::Less => ["lt", "le", "ne"].contains(&name), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 07d814727293e..eb80e8706fa0c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -320,11 +320,11 @@ impl<'ctx> MirLowerCtx<'ctx> { expr_id: ExprId, current: BasicBlockId, ) -> Result> { - if !self.has_adjustments(expr_id) { - if let Expr::Literal(l) = &self.body[expr_id] { - let ty = self.expr_ty_without_adjust(expr_id); - return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); - } + if !self.has_adjustments(expr_id) + && let Expr::Literal(l) = &self.body[expr_id] + { + let ty = self.expr_ty_without_adjust(expr_id); + return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); } let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? 
else { return Ok(None); @@ -1039,18 +1039,18 @@ impl<'ctx> MirLowerCtx<'ctx> { && rhs_ty.is_scalar() && (lhs_ty == rhs_ty || builtin_inequal_impls) }; - if !is_builtin { - if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) { - let func = Operand::from_fn(self.db, func_id, generic_args); - return self.lower_call_and_args( - func, - [*lhs, *rhs].into_iter(), - place, - current, - self.is_uninhabited(expr_id), - expr_id.into(), - ); - } + if !is_builtin + && let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) + { + let func = Operand::from_fn(self.db, func_id, generic_args); + return self.lower_call_and_args( + func, + [*lhs, *rhs].into_iter(), + place, + current, + self.is_uninhabited(expr_id), + expr_id.into(), + ); } if let hir_def::hir::BinaryOp::Assignment { op: Some(op) } = op { // last adjustment is `&mut` which we don't want it. @@ -1596,10 +1596,10 @@ impl<'ctx> MirLowerCtx<'ctx> { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { let mut ty = None; - if let Some(it) = self.infer.expr_adjustments.get(&e) { - if let Some(it) = it.last() { - ty = Some(it.target.clone()); - } + if let Some(it) = self.infer.expr_adjustments.get(&e) + && let Some(it) = it.last() + { + ty = Some(it.target.clone()); } ty.unwrap_or_else(|| self.expr_ty_without_adjust(e)) } @@ -1848,13 +1848,13 @@ impl<'ctx> MirLowerCtx<'ctx> { self.result.param_locals.extend(params.clone().map(|(it, ty)| { let local_id = self.result.locals.alloc(Local { ty }); self.drop_scopes.last_mut().unwrap().locals.push(local_id); - if let Pat::Bind { id, subpat: None } = self.body[it] { - if matches!( + if let Pat::Bind { id, subpat: None } = self.body[it] + && matches!( self.body[id].mode, BindingAnnotation::Unannotated | BindingAnnotation::Mutable - ) { - self.result.binding_locals.insert(id, local_id); - } + ) + { + self.result.binding_locals.insert(id, local_id); } local_id })); @@ -1887,10 +1887,10 @@ impl<'ctx> MirLowerCtx<'ctx> { .into_iter() .skip(base_param_count + self_binding.is_some() as usize); for ((param, _), local) in params.zip(local_params) { - if let Pat::Bind { id, .. } = self.body[param] { - if local == self.binding_local(id)? { - continue; - } + if let Pat::Bind { id, .. } = self.body[param] + && local == self.binding_local(id)? 
+ { + continue; } let r = self.pattern_match(current, None, local.into(), param)?; if let Some(b) = r.1 { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index e074c2d558e84..42a14664626f0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -189,17 +189,14 @@ impl MirLowerCtx<'_> { self.expr_ty_without_adjust(expr_id), expr_id.into(), 'b: { - if let Some((f, _)) = self.infer.method_resolution(expr_id) { - if let Some(deref_trait) = + if let Some((f, _)) = self.infer.method_resolution(expr_id) + && let Some(deref_trait) = self.resolve_lang_item(LangItem::DerefMut)?.as_trait() - { - if let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - break 'b deref_fn == f; - } - } + && let Some(deref_fn) = deref_trait + .trait_items(self.db) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) + { + break 'b deref_fn == f; } false }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 3325226b1d369..0440d85022321 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -317,27 +317,26 @@ impl MirLowerCtx<'_> { (current, current_else) = self.pattern_match_inner(current, current_else, next_place, pat, mode)?; } - if let &Some(slice) = slice { - if mode != MatchingMode::Check { - if let Pat::Bind { id, subpat: _ } = self.body[slice] { - let next_place = cond_place.project( - ProjectionElem::Subslice { - from: prefix.len() as u64, - to: suffix.len() as u64, - }, - &mut self.result.projection_store, - ); - let mode = self.infer.binding_modes[slice]; - (current, current_else) = self.pattern_match_binding( - id, - mode, - next_place, - (slice).into(), - current, - current_else, - )?; - } - } + if let &Some(slice) = slice + && mode != MatchingMode::Check + && let Pat::Bind { id, subpat: _ } = self.body[slice] + { + let next_place = cond_place.project( + ProjectionElem::Subslice { + from: prefix.len() as u64, + to: suffix.len() as u64, + }, + &mut self.result.projection_store, + ); + let mode = self.infer.binding_modes[slice]; + (current, current_else) = self.pattern_match_binding( + id, + mode, + next_place, + (slice).into(), + current, + current_else, + )?; } for (i, &pat) in suffix.iter().enumerate() { let next_place = cond_place.project( @@ -391,10 +390,10 @@ impl MirLowerCtx<'_> { return Ok((current, current_else)); } let (c, subst) = 'b: { - if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) { - if let AssocItemId::ConstId(c) = x.0 { - break 'b (c, x.1); - } + if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) + && let AssocItemId::ConstId(c) = x.0 + { + break 'b (c, x.1); } if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr { break 'b (c, Substitution::empty(Interner)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs index b5de0e52f5b63..775136dc0cbf7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs @@ -149,7 +149,7 @@ impl TestDB { .into_iter() .filter_map(|file_id| { let text = self.file_text(file_id.file_id(self)); - let annotations = 
extract_annotations(&text.text(self)); + let annotations = extract_annotations(text.text(self)); if annotations.is_empty() { return None; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 7414b4fc6070e..08b9d242e71d2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -125,11 +125,10 @@ pub(crate) fn trait_solve_query( alias: AliasTy::Projection(projection_ty), .. }))) = &goal.value.goal.data(Interner) + && let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) { - if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) { - // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible - return Some(Solution::Ambig(Guidance::Unknown)); - } + // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible + return Some(Solution::Ambig(Guidance::Unknown)); } // Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index d07c1aa33b407..209ec7926e825 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -333,13 +333,13 @@ impl FallibleTypeFolder for UnevaluatedConstEvaluatorFolder<'_> { constant: Const, _outer_binder: DebruijnIndex, ) -> Result { - if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value { - if let ConstScalar::UnevaluatedConst(id, subst) = &c.interned { - if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) { - return Ok(eval); - } else { - return Ok(unknown_const(constant.data(Interner).ty.clone())); - } + if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value + && let ConstScalar::UnevaluatedConst(id, subst) = &c.interned + { + if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) { + return Ok(eval); + } else { + return Ok(unknown_const(constant.data(Interner).ty.clone())); } } Ok(constant) diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index c1e814ec223e3..fca0162765ecf 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -604,13 +604,13 @@ impl<'db> AnyDiagnostic<'db> { } } BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr } => { - if let Ok(source_ptr) = source_map.expr_syntax(if_expr) { - if let Some(ptr) = source_ptr.value.cast::() { - return Some( - RemoveUnnecessaryElse { if_expr: InFile::new(source_ptr.file_id, ptr) } - .into(), - ); - } + if let Ok(source_ptr) = source_map.expr_syntax(if_expr) + && let Some(ptr) = source_ptr.value.cast::() + { + return Some( + RemoveUnnecessaryElse { if_expr: InFile::new(source_ptr.file_id, ptr) } + .into(), + ); } } } diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 4ddb04b24f7f2..a323f97997c68 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1020,21 +1020,21 @@ fn emit_macro_def_diagnostics<'db>( m: Macro, ) { let id = db.macro_def(m.id); - if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { - if let Some(e) = expander.mac.err() { - let Some(ast) = 
id.ast_id().left() else { - never!("declarative expander for non decl-macro: {:?}", e); - return; - }; - let krate = HasModule::krate(&m.id, db); - let edition = krate.data(db).edition; - emit_def_diagnostic_( - db, - acc, - &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, - edition, - ); - } + if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) + && let Some(e) = expander.mac.err() + { + let Some(ast) = id.ast_id().left() else { + never!("declarative expander for non decl-macro: {:?}", e); + return; + }; + let krate = HasModule::krate(&m.id, db); + let edition = krate.data(db).edition; + emit_def_diagnostic_( + db, + acc, + &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, + edition, + ); } } @@ -2564,10 +2564,10 @@ impl<'db> Param<'db> { Callee::Closure(closure, _) => { let c = db.lookup_intern_closure(closure.into()); let body = db.body(c.0); - if let Expr::Closure { args, .. } = &body[c.1] { - if let Pat::Bind { id, .. } = &body[args[self.idx]] { - return Some(Local { parent: c.0, binding_id: *id }); - } + if let Expr::Closure { args, .. } = &body[c.1] + && let Pat::Bind { id, .. } = &body[args[self.idx]] + { + return Some(Local { parent: c.0, binding_id: *id }); } None } @@ -2761,26 +2761,20 @@ impl EvaluatedConst { pub fn render_debug(&self, db: &dyn HirDatabase) -> Result { let data = self.const_.data(Interner); - if let TyKind::Scalar(s) = data.ty.kind(Interner) { - if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) { - if let hir_ty::ConstValue::Concrete(c) = &data.value { - if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned { - let value = u128::from_le_bytes(mir::pad16(b, false)); - let value_signed = - i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_)))); - let mut result = if let Scalar::Int(_) = s { - value_signed.to_string() - } else { - value.to_string() - }; - if value >= 10 { - format_to!(result, " ({value:#X})"); - return Ok(result); - } else { - return Ok(result); - } - } - } + if let TyKind::Scalar(s) = data.ty.kind(Interner) + && matches!(s, Scalar::Int(_) | Scalar::Uint(_)) + && let hir_ty::ConstValue::Concrete(c) = &data.value + && let hir_ty::ConstScalar::Bytes(b, _) = &c.interned + { + let value = u128::from_le_bytes(mir::pad16(b, false)); + let value_signed = i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_)))); + let mut result = + if let Scalar::Int(_) = s { value_signed.to_string() } else { value.to_string() }; + if value >= 10 { + format_to!(result, " ({value:#X})"); + return Ok(result); + } else { + return Ok(result); } } mir::render_const_using_debug_impl(db, self.def, &self.const_) @@ -4421,10 +4415,10 @@ impl Impl { let impls = db.trait_impls_in_crate(id); all.extend(impls.for_trait(trait_.id).map(Self::from)) } - if let Some(block) = module.id.containing_block() { - if let Some(trait_impls) = db.trait_impls_in_block(block) { - all.extend(trait_impls.for_trait(trait_.id).map(Self::from)); - } + if let Some(block) = module.id.containing_block() + && let Some(trait_impls) = db.trait_impls_in_block(block) + { + all.extend(trait_impls.for_trait(trait_.id).map(Self::from)); } all } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index adba59236a40f..d207305b4c61f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -933,19 +933,18 @@ impl<'db> SemanticsImpl<'db> { InFile::new(file.file_id, last), false, &mut |InFile { value: 
last, file_id: last_fid }, _ctx| { - if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { - if first_fid == last_fid { - if let Some(p) = first.parent() { - let range = first.text_range().cover(last.text_range()); - let node = find_root(&p) - .covering_element(range) - .ancestors() - .take_while(|it| it.text_range() == range) - .find_map(N::cast); - if let Some(node) = node { - res.push(node); - } - } + if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() + && first_fid == last_fid + && let Some(p) = first.parent() + { + let range = first.text_range().cover(last.text_range()); + let node = find_root(&p) + .covering_element(range) + .ancestors() + .take_while(|it| it.text_range() == range) + .find_map(N::cast); + if let Some(node) = node { + res.push(node); } } }, @@ -1391,10 +1390,10 @@ impl<'db> SemanticsImpl<'db> { } })() .is_none(); - if was_not_remapped { - if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) { - return Some(b); - } + if was_not_remapped + && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) + { + return Some(b); } } } @@ -2068,14 +2067,12 @@ impl<'db> SemanticsImpl<'db> { break false; } - if let Some(parent) = ast::Expr::cast(parent.clone()) { - if let Some(ExprOrPatId::ExprId(expr_id)) = + if let Some(parent) = ast::Expr::cast(parent.clone()) + && let Some(ExprOrPatId::ExprId(expr_id)) = source_map.node_expr(InFile { file_id, value: &parent }) - { - if let Expr::Unsafe { .. } = body[expr_id] { - break true; - } - } + && let Expr::Unsafe { .. } = body[expr_id] + { + break true; } let Some(parent_) = parent.parent() else { break false }; @@ -2354,32 +2351,30 @@ struct RenameConflictsVisitor<'a> { impl RenameConflictsVisitor<'_> { fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) { - if let Path::BarePath(path) = path { - if let Some(name) = path.as_ident() { - if *name.symbol() == self.new_name { - if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed( - self.db, - name, - path, - self.body.expr_or_pat_path_hygiene(node), - self.to_be_renamed, - ) { - self.conflicts.insert(conflicting); - } - } else if *name.symbol() == self.old_name { - if let Some(conflicting) = - self.resolver.rename_will_conflict_with_another_variable( - self.db, - name, - path, - self.body.expr_or_pat_path_hygiene(node), - &self.new_name, - self.to_be_renamed, - ) - { - self.conflicts.insert(conflicting); - } + if let Path::BarePath(path) = path + && let Some(name) = path.as_ident() + { + if *name.symbol() == self.new_name { + if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed( + self.db, + name, + path, + self.body.expr_or_pat_path_hygiene(node), + self.to_be_renamed, + ) { + self.conflicts.insert(conflicting); } + } else if *name.symbol() == self.old_name + && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable( + self.db, + name, + path, + self.body.expr_or_pat_path_hygiene(node), + &self.new_name, + self.to_be_renamed, + ) + { + self.conflicts.insert(conflicting); } } } diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 0b554a9d4e37a..d25fb1d8cdb7e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -995,11 +995,11 @@ impl<'db> SourceAnalyzer<'db> { // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // trying to resolve foo::bar. 
- if let Some(use_tree) = parent().and_then(ast::UseTree::cast) { - if use_tree.coloncolon_token().is_some() { - return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) - .map(|it| (it, None)); - } + if let Some(use_tree) = parent().and_then(ast::UseTree::cast) + && use_tree.coloncolon_token().is_some() + { + return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) + .map(|it| (it, None)); } let meta_path = path @@ -1035,24 +1035,19 @@ impl<'db> SourceAnalyzer<'db> { // } // ``` Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => { - if let Some(mod_path) = hir_path.mod_path() { - if let Some(ModuleDefId::ModuleId(id)) = + if let Some(mod_path) = hir_path.mod_path() + && let Some(ModuleDefId::ModuleId(id)) = self.resolver.resolve_module_path_in_items(db, mod_path).take_types() + { + let parent_hir_name = parent_hir_path.segments().get(1).map(|it| it.name); + let module = crate::Module { id }; + if module + .scope(db, None) + .into_iter() + .any(|(name, _)| Some(&name) == parent_hir_name) { - let parent_hir_name = - parent_hir_path.segments().get(1).map(|it| it.name); - let module = crate::Module { id }; - if module - .scope(db, None) - .into_iter() - .any(|(name, _)| Some(&name) == parent_hir_name) - { - return Some(( - PathResolution::Def(ModuleDef::Module(module)), - None, - )); - }; - } + return Some((PathResolution::Def(ModuleDef::Module(module)), None)); + }; } Some((it, None)) } @@ -1282,22 +1277,22 @@ impl<'db> SourceAnalyzer<'db> { db: &'db dyn HirDatabase, macro_expr: InFile<&ast::MacroExpr>, ) -> bool { - if let Some((def, body, sm, Some(infer))) = self.body_() { - if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) { - let mut is_unsafe = false; - let mut walk_expr = |expr_id| { - unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| { - is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No - }) - }; - match expanded_expr { - ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr), - ExprOrPatId::PatId(expanded_pat) => { - body.walk_exprs_in_pat(expanded_pat, &mut walk_expr) - } + if let Some((def, body, sm, Some(infer))) = self.body_() + && let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) + { + let mut is_unsafe = false; + let mut walk_expr = |expr_id| { + unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| { + is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No + }) + }; + match expanded_expr { + ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr), + ExprOrPatId::PatId(expanded_pat) => { + body.walk_exprs_in_pat(expanded_pat, &mut walk_expr) } - return is_unsafe; } + return is_unsafe; } false } @@ -1575,12 +1570,11 @@ fn resolve_hir_path_( // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type // within the trait's associated types. 
- if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { - if let Some(type_alias_id) = + if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) + && let Some(type_alias_id) = trait_id.trait_items(db).associated_type_by_name(unresolved.name) - { - return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); - } + { + return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); } let res = match ty { @@ -1726,12 +1720,11 @@ fn resolve_hir_path_qualifier( // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type // within the trait's associated types. - if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { - if let Some(type_alias_id) = + if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) + && let Some(type_alias_id) = trait_id.trait_items(db).associated_type_by_name(unresolved.name) - { - return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); - } + { + return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); } let res = match ty { diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search.rs b/src/tools/rust-analyzer/crates/hir/src/term_search.rs index 4b354e640628d..e4089218305ce 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search.rs @@ -122,10 +122,10 @@ impl<'db> LookupTable<'db> { } // Collapse suggestions if there are many - if let Some(res) = &res { - if res.len() > self.many_threshold { - return Some(vec![Expr::Many(ty.clone())]); - } + if let Some(res) = &res + && res.len() > self.many_threshold + { + return Some(vec![Expr::Many(ty.clone())]); } res @@ -160,10 +160,10 @@ impl<'db> LookupTable<'db> { } // Collapse suggestions if there are many - if let Some(res) = &res { - if res.len() > self.many_threshold { - return Some(vec![Expr::Many(ty.clone())]); - } + if let Some(res) = &res + && res.len() > self.many_threshold + { + return Some(vec![Expr::Many(ty.clone())]); } res diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs index 843831948adc8..78f534d014b90 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs @@ -336,10 +336,10 @@ impl<'db> Expr<'db> { if let Expr::Method { func, params, .. } = self { res.extend(params.iter().flat_map(|it| it.traits_used(db))); - if let Some(it) = func.as_assoc_item(db) { - if let Some(it) = it.container_or_implemented_trait(db) { - res.push(it); - } + if let Some(it) = func.as_assoc_item(db) + && let Some(it) = it.container_or_implemented_trait(db) + { + res.push(it); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs index dcdc7ea9cdced..27dbdcf2c4d57 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs @@ -82,10 +82,10 @@ fn fetch_borrowed_types(node: &ast::Adt) -> Option> { record_field_list .fields() .filter_map(|r_field| { - if let ast::Type::RefType(ref_type) = r_field.ty()? { - if ref_type.lifetime().is_none() { - return Some(ref_type); - } + if let ast::Type::RefType(ref_type) = r_field.ty()? 
+ && ref_type.lifetime().is_none() + { + return Some(ref_type); } None @@ -102,10 +102,10 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option record_list .fields() .filter_map(|f| { - if let ast::Type::RefType(ref_type) = f.ty()? { - if ref_type.lifetime().is_none() { - return Some(ref_type); - } + if let ast::Type::RefType(ref_type) = f.ty()? + && ref_type.lifetime().is_none() + { + return Some(ref_type); } None @@ -114,10 +114,10 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option tuple_field_list .fields() .filter_map(|f| { - if let ast::Type::RefType(ref_type) = f.ty()? { - if ref_type.lifetime().is_none() { - return Some(ref_type); - } + if let ast::Type::RefType(ref_type) = f.ty()? + && ref_type.lifetime().is_none() + { + return Some(ref_type); } None diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index ab183ac70895f..11201afb8a7f2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -183,13 +183,11 @@ fn add_missing_impl_members_inner( .clone() .into_iter() .chain(other_items.iter().cloned()) - .map(either::Either::Right) .collect::>(); let mut editor = edit.make_editor(impl_def.syntax()); if let Some(assoc_item_list) = impl_def.assoc_item_list() { - let items = new_assoc_items.into_iter().filter_map(either::Either::right).collect(); - assoc_item_list.add_items(&mut editor, items); + assoc_item_list.add_items(&mut editor, new_assoc_items); } else { let assoc_item_list = make::assoc_item_list(Some(new_assoc_items)).clone_for_update(); editor.insert_all( @@ -201,14 +199,12 @@ fn add_missing_impl_members_inner( if let Some(cap) = ctx.config.snippet_cap { let mut placeholder = None; - if let DefaultMethods::No = mode { - if let Some(ast::AssocItem::Fn(func)) = &first_new_item { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) - && m.syntax().text() == "todo!()" - { - placeholder = Some(m); - } - } + if let DefaultMethods::No = mode + && let Some(ast::AssocItem::Fn(func)) = &first_new_item + && let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) + && m.syntax().text() == "todo!()" + { + placeholder = Some(m); } if let Some(macro_call) = placeholder { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs index 3b447d1f6d572..753a9e56c35ac 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs @@ -207,10 +207,10 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_> // negate all tail expressions in the closure body let tail_cb = &mut |e: &_| tail_cb_impl(&mut editor, &make, e); walk_expr(&closure_body, &mut |expr| { - if let ast::Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let ast::Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&closure_body, tail_cb); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs index d7b7e8d9cad07..9d5d3f223707a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -86,12 +86,11 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_> e @ ast::Expr::CallExpr(_) => Some(e.clone()), _ => None, }; - if let Some(ast::Expr::CallExpr(call)) = e { - if let Some(arg_list) = call.arg_list() { - if let Some(arg) = arg_list.args().next() { - editor.replace(call.syntax(), arg.syntax()); - } - } + if let Some(ast::Expr::CallExpr(call)) = e + && let Some(arg_list) = call.arg_list() + && let Some(arg) = arg_list.args().next() + { + editor.replace(call.syntax(), arg.syntax()); } }); let edit = editor.finish(); @@ -276,12 +275,12 @@ fn is_invalid_body( e @ ast::Expr::CallExpr(_) => Some(e.clone()), _ => None, }; - if let Some(ast::Expr::CallExpr(call)) = e { - if let Some(ast::Expr::PathExpr(p)) = call.expr() { - let res = p.path().and_then(|p| sema.resolve_path(&p)); - if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res { - return invalid |= v != some_variant; - } + if let Some(ast::Expr::CallExpr(call)) = e + && let Some(ast::Expr::PathExpr(p)) = call.expr() + { + let res = p.path().and_then(|p| sema.resolve_path(&p)); + if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res { + return invalid |= v != some_variant; } } invalid = true diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs index 43515de71e20d..916bb67ebb405 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs @@ -101,21 +101,21 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>) // but we need to locate `AstPtr`s inside the body. 
let mut wrap_body_in_block = true; if let ast::Expr::BlockExpr(block) = &body { - if let Some(async_token) = block.async_token() { - if !is_async { - is_async = true; - ret_ty = ret_ty.future_output(ctx.db())?; - let token_idx = async_token.index(); - let whitespace_tokens_after_count = async_token - .siblings_with_tokens(Direction::Next) - .skip(1) - .take_while(|token| token.kind() == SyntaxKind::WHITESPACE) - .count(); - body.syntax().splice_children( - token_idx..token_idx + whitespace_tokens_after_count + 1, - Vec::new(), - ); - } + if let Some(async_token) = block.async_token() + && !is_async + { + is_async = true; + ret_ty = ret_ty.future_output(ctx.db())?; + let token_idx = async_token.index(); + let whitespace_tokens_after_count = async_token + .siblings_with_tokens(Direction::Next) + .skip(1) + .take_while(|token| token.kind() == SyntaxKind::WHITESPACE) + .count(); + body.syntax().splice_children( + token_idx..token_idx + whitespace_tokens_after_count + 1, + Vec::new(), + ); } if let Some(gen_token) = block.gen_token() { is_gen = true; @@ -513,10 +513,10 @@ fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Exp CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true, CaptureKind::Move => return place, }; - if let ast::Expr::PrefixExpr(expr) = &place { - if expr.op_kind() == Some(ast::UnaryOp::Deref) { - return expr.expr().expect("`display_place_source_code()` produced an invalid expr"); - } + if let ast::Expr::PrefixExpr(expr) = &place + && expr.op_kind() == Some(ast::UnaryOp::Deref) + { + return expr.expr().expect("`display_place_source_code()` produced an invalid expr"); } make::expr_ref(place, needs_mut) } @@ -642,11 +642,11 @@ fn peel_blocks_and_refs_and_parens(mut expr: ast::Expr) -> ast::Expr { expr = ast::Expr::cast(parent).unwrap(); continue; } - if let Some(stmt_list) = ast::StmtList::cast(parent) { - if let Some(block) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) { - expr = ast::Expr::BlockExpr(block); - continue; - } + if let Some(stmt_list) = ast::StmtList::cast(parent) + && let Some(block) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) + { + expr = ast::Expr::BlockExpr(block); + continue; } break; } @@ -662,12 +662,11 @@ fn expr_of_pat(pat: ast::Pat) -> Option { if let Some(let_stmt) = ast::LetStmt::cast(ancestor.clone()) { break 'find_expr let_stmt.initializer(); } - if ast::MatchArm::can_cast(ancestor.kind()) { - if let Some(match_) = + if ast::MatchArm::can_cast(ancestor.kind()) + && let Some(match_) = ancestor.parent().and_then(|it| it.parent()).and_then(ast::MatchExpr::cast) - { - break 'find_expr match_.expr(); - } + { + break 'find_expr match_.expr(); } if ast::ExprStmt::can_cast(ancestor.kind()) { break; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs index db41927f1df2f..f1cc3d90b9c56 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs @@ -1,9 +1,7 @@ use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; -use itertools::Itertools; -use syntax::{ - ast::{self, AstNode, HasGenericArgs, HasName, make}, - ted, -}; +use syntax::ast::edit::IndentLevel; +use syntax::ast::{self, AstNode, HasGenericArgs, HasName, make}; +use syntax::syntax_editor::{Element, Position}; use crate::{AssistContext, AssistId, Assists}; @@ -49,11 
+47,12 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> }; let associated_items = impl_.assoc_item_list()?; + let associated_l_curly = associated_items.l_curly_token()?; let from_fn = associated_items.assoc_items().find_map(|item| { - if let ast::AssocItem::Fn(f) = item { - if f.name()?.text() == "from" { - return Some(f); - } + if let ast::AssocItem::Fn(f) = item + && f.name()?.text() == "from" + { + return Some(f); }; None })?; @@ -75,30 +74,25 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> "Convert From to TryFrom", impl_.syntax().text_range(), |builder| { - let trait_ty = builder.make_mut(trait_ty); - let from_fn_return_type = builder.make_mut(from_fn_return_type); - let from_fn_name = builder.make_mut(from_fn_name); - let tail_expr = builder.make_mut(tail_expr); - let return_exprs = return_exprs.map(|r| builder.make_mut(r)).collect_vec(); - let associated_items = builder.make_mut(associated_items); - - ted::replace( + let mut editor = builder.make_editor(impl_.syntax()); + editor.replace( trait_ty.syntax(), make::ty(&format!("TryFrom<{from_type}>")).syntax().clone_for_update(), ); - ted::replace( + editor.replace( from_fn_return_type.syntax(), make::ty("Result<Self, Self::Error>").syntax().clone_for_update(), ); - ted::replace(from_fn_name.syntax(), make::name("try_from").syntax().clone_for_update()); - ted::replace( + editor + .replace(from_fn_name.syntax(), make::name("try_from").syntax().clone_for_update()); + editor.replace( tail_expr.syntax(), wrap_ok(tail_expr.clone()).syntax().clone_for_update(), ); for r in return_exprs { let t = r.expr().unwrap_or_else(make::ext::expr_unit); - ted::replace(t.syntax(), wrap_ok(t.clone()).syntax().clone_for_update()); + editor.replace(t.syntax(), wrap_ok(t.clone()).syntax().clone_for_update()); } let error_type = ast::AssocItem::TypeAlias(make::ty_alias( @@ -110,15 +104,24 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> )) .clone_for_update(); - if let Some(cap) = ctx.config.snippet_cap { - if let ast::AssocItem::TypeAlias(type_alias) = &error_type { - if let Some(ty) = type_alias.ty() { - builder.add_placeholder_snippet(cap, ty); - } - } + if let Some(cap) = ctx.config.snippet_cap + && let ast::AssocItem::TypeAlias(type_alias) = &error_type + && let Some(ty) = type_alias.ty() + { + let placeholder = builder.make_placeholder_snippet(cap); + editor.add_annotation(ty.syntax(), placeholder); } - associated_items.add_item_at_start(error_type); + let indent = IndentLevel::from_token(&associated_l_curly) + 1; + editor.insert_all( + Position::after(associated_l_curly), + vec![ + make::tokens::whitespace(&format!("\n{indent}")).syntax_element(), + error_type.syntax().syntax_element(), + make::tokens::whitespace("\n").syntax_element(), + ], + ); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs index b80276a95fbf5..3d9cde0e0a67c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -65,10 +65,10 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - }; let into_fn = impl_.assoc_item_list()?.assoc_items().find_map(|item| { - if let ast::AssocItem::Fn(f) = item { - if f.name()?.text() == "into" { - return Some(f); - } + if let
ast::AssocItem::Fn(f) = item + && f.name()?.text() == "into" + { + return Some(f); }; None })?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index cca4cb9d8f775..247c1011589bb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -265,10 +265,10 @@ fn replace_body_return_values(body: ast::Expr, struct_name: &str) { let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); walk_expr(&body, &mut |expr| { - if let ast::Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let ast::Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&body, tail_cb); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 80756197fb700..3d78895477b31 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -1,9 +1,14 @@ use either::Either; +use hir::FileRangeWrapper; use ide_db::defs::{Definition, NameRefClass}; +use std::ops::RangeInclusive; use syntax::{ - SyntaxKind, SyntaxNode, - ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, - match_ast, ted, + SyntaxElement, SyntaxKind, SyntaxNode, T, TextSize, + ast::{ + self, AstNode, HasAttrs, HasGenericParams, HasVisibility, syntax_factory::SyntaxFactory, + }, + match_ast, + syntax_editor::{Element, Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; @@ -71,7 +76,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct( Either::Right(v) => Either::Right(ctx.sema.to_def(v)?), }; let target = strukt_or_variant.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range(); - + let syntax = strukt_or_variant.as_ref().either(|s| s.syntax(), |v| v.syntax()); acc.add( AssistId::refactor_rewrite("convert_tuple_struct_to_named_struct"), "Convert to named struct", @@ -79,58 +84,55 @@ pub(crate) fn convert_tuple_struct_to_named_struct( |edit| { let names = generate_names(tuple_fields.fields()); edit_field_references(ctx, edit, tuple_fields.fields(), &names); + let mut editor = edit.make_editor(syntax); edit_struct_references(ctx, edit, strukt_def, &names); - edit_struct_def(ctx, edit, &strukt_or_variant, tuple_fields, names); + edit_struct_def(&mut editor, &strukt_or_variant, tuple_fields, names); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } fn edit_struct_def( - ctx: &AssistContext<'_>, - edit: &mut SourceChangeBuilder, + editor: &mut SyntaxEditor, strukt: &Either, tuple_fields: ast::TupleFieldList, names: Vec, ) { let record_fields = tuple_fields.fields().zip(names).filter_map(|(f, name)| { - let field = ast::make::record_field(f.visibility(), name, f.ty()?).clone_for_update(); - ted::insert_all( - ted::Position::first_child_of(field.syntax()), + let field = ast::make::record_field(f.visibility(), name, f.ty()?); + let mut field_editor = SyntaxEditor::new(field.syntax().clone()); 
+ field_editor.insert_all( + Position::first_child_of(field.syntax()), f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(), ); - Some(field) + ast::RecordField::cast(field_editor.finish().new_root().clone()) }); - let record_fields = ast::make::record_field_list(record_fields); - let tuple_fields_text_range = tuple_fields.syntax().text_range(); - - edit.edit_file(ctx.vfs_file_id()); + let make = SyntaxFactory::without_mappings(); + let record_fields = make.record_field_list(record_fields); + let tuple_fields_before = Position::before(tuple_fields.syntax()); if let Either::Left(strukt) = strukt { if let Some(w) = strukt.where_clause() { - edit.delete(w.syntax().text_range()); - edit.insert( - tuple_fields_text_range.start(), - ast::make::tokens::single_newline().text(), - ); - edit.insert(tuple_fields_text_range.start(), w.syntax().text()); + editor.delete(w.syntax()); + let mut insert_element = Vec::new(); + insert_element.push(ast::make::tokens::single_newline().syntax_element()); + insert_element.push(w.syntax().clone_for_update().syntax_element()); if w.syntax().last_token().is_none_or(|t| t.kind() != SyntaxKind::COMMA) { - edit.insert(tuple_fields_text_range.start(), ","); + insert_element.push(ast::make::token(T![,]).into()); } - edit.insert( - tuple_fields_text_range.start(), - ast::make::tokens::single_newline().text(), - ); + insert_element.push(ast::make::tokens::single_newline().syntax_element()); + editor.insert_all(tuple_fields_before, insert_element); } else { - edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text()); + editor.insert(tuple_fields_before, ast::make::tokens::single_space()); } if let Some(t) = strukt.semicolon_token() { - edit.delete(t.text_range()); + editor.delete(t); } } else { - edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text()); + editor.insert(tuple_fields_before, ast::make::tokens::single_space()); } - edit.replace(tuple_fields_text_range, record_fields.to_string()); + editor.replace(tuple_fields.syntax(), record_fields.syntax()); } fn edit_struct_references( @@ -145,27 +147,22 @@ fn edit_struct_references( }; let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); - let edit_node = |edit: &mut SourceChangeBuilder, node: SyntaxNode| -> Option<()> { + let edit_node = |node: SyntaxNode| -> Option<SyntaxNode> { + let make = SyntaxFactory::without_mappings(); match_ast!
{ match node { ast::TupleStructPat(tuple_struct_pat) => { - let file_range = ctx.sema.original_range_opt(&node)?; - edit.edit_file(file_range.file_id.file_id(ctx.db())); - edit.replace( - file_range.range, - ast::make::record_pat_with_fields( - tuple_struct_pat.path()?, - ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map( - |(pat, name)| { - ast::make::record_pat_field( - ast::make::name_ref(&name.to_string()), - pat, - ) - }, - ), None), - ) - .to_string(), - ); + Some(make.record_pat_with_fields( + tuple_struct_pat.path()?, + ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map( + |(pat, name)| { + ast::make::record_pat_field( + ast::make::name_ref(&name.to_string()), + pat, + ) + }, + ), None), + ).syntax().clone()) }, // for tuple struct creations like Foo(42) ast::CallExpr(call_expr) => { @@ -181,10 +178,8 @@ fn edit_struct_references( } let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?; - - edit.replace( - ctx.sema.original_range(&node).range, - ast::make::record_expr( + Some( + make.record_expr( path, ast::make::record_expr_field_list(arg_list.args().zip(names).map( |(expr, name)| { @@ -194,25 +189,58 @@ fn edit_struct_references( ) }, )), - ) - .to_string(), - ); + ).syntax().clone() + ) }, - _ => return None, + _ => None, } } - Some(()) }; for (file_id, refs) in usages { - edit.edit_file(file_id.file_id(ctx.db())); - for r in refs { - for node in r.name.syntax().ancestors() { - if edit_node(edit, node).is_some() { - break; + let source = ctx.sema.parse(file_id); + let source = source.syntax(); + + let mut editor = edit.make_editor(source); + for r in refs.iter().rev() { + if let Some((old_node, new_node)) = r + .name + .syntax() + .ancestors() + .find_map(|node| Some((node.clone(), edit_node(node.clone())?))) + { + if let Some(old_node) = ctx.sema.original_syntax_node_rooted(&old_node) { + editor.replace(old_node, new_node); + } else { + let FileRangeWrapper { file_id: _, range } = ctx.sema.original_range(&old_node); + let parent = source.covering_element(range); + match parent { + SyntaxElement::Token(token) => { + editor.replace(token, new_node.syntax_element()); + } + SyntaxElement::Node(parent_node) => { + // replace the part of macro + // ``` + // foo!(a, Test::A(0)); + // ^^^^^^^^^^^^^^^ // parent_node + // ^^^^^^^^^^ // replace_range + // ``` + let start = parent_node + .children_with_tokens() + .find(|t| t.text_range().contains(range.start())); + let end = parent_node + .children_with_tokens() + .find(|t| t.text_range().contains(range.end() - TextSize::new(1))); + if let (Some(start), Some(end)) = (start, end) { + let replace_range = RangeInclusive::new(start, end); + editor.replace_all(replace_range, vec![new_node.into()]); + } + } + } } } } + edit.add_file_edits(file_id.file_id(ctx.db()), editor); } } @@ -230,22 +258,28 @@ fn edit_field_references( let def = Definition::Field(field); let usages = def.usages(&ctx.sema).all(); for (file_id, refs) in usages { - edit.edit_file(file_id.file_id(ctx.db())); + let source = ctx.sema.parse(file_id); + let source = source.syntax(); + let mut editor = edit.make_editor(source); for r in refs { - if let Some(name_ref) = r.name.as_name_ref() { - edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text()); + if let Some(name_ref) = r.name.as_name_ref() + && let Some(original) = ctx.sema.original_ast_node(name_ref.clone()) + { + editor.replace(original.syntax(), name.syntax()); } } + edit.add_file_edits(file_id.file_id(ctx.db()), editor); } } } fn 
generate_names(fields: impl Iterator) -> Vec { + let make = SyntaxFactory::without_mappings(); fields .enumerate() .map(|(i, _)| { let idx = i + 1; - ast::make::name(&format!("field{idx}")) + make.name(&format!("field{idx}")) }) .collect() } @@ -1013,8 +1047,7 @@ where pub struct $0Foo(#[my_custom_attr] u32); "#, r#" -pub struct Foo { #[my_custom_attr] -field1: u32 } +pub struct Foo { #[my_custom_attr]field1: u32 } "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs index e582aa814ae14..1af5db17f0400 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs @@ -100,10 +100,10 @@ fn is_bool_literal_expr( sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr, ) -> Option { - if let ast::Expr::Literal(lit) = expr { - if let ast::LiteralKind::Bool(b) = lit.kind() { - return Some(ArmBodyExpression::Literal(b)); - } + if let ast::Expr::Literal(lit) = expr + && let ast::LiteralKind::Bool(b) = lit.kind() + { + return Some(ArmBodyExpression::Literal(b)); } if !sema.type_of_expr(expr)?.original.is_bool() { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs index efadde9e3648a..9976e34e730cc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs @@ -106,73 +106,73 @@ pub(crate) fn desugar_try_expr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op }, ); - if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast) { - if let_stmt.let_else().is_none() { - let pat = let_stmt.pat()?; - acc.add( - AssistId::refactor_rewrite("desugar_try_expr_let_else"), - "Replace try expression with let else", - target, - |builder| { - let make = SyntaxFactory::with_mappings(); - let mut editor = builder.make_editor(let_stmt.syntax()); + if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast) + && let_stmt.let_else().is_none() + { + let pat = let_stmt.pat()?; + acc.add( + AssistId::refactor_rewrite("desugar_try_expr_let_else"), + "Replace try expression with let else", + target, + |builder| { + let make = SyntaxFactory::with_mappings(); + let mut editor = builder.make_editor(let_stmt.syntax()); - let indent_level = IndentLevel::from_node(let_stmt.syntax()); - let new_let_stmt = make.let_else_stmt( - try_enum.happy_pattern(pat), - let_stmt.ty(), - expr, - make.block_expr( - iter::once( - make.expr_stmt( - make.expr_return(Some(match try_enum { - TryEnum::Option => make.expr_path(make.ident_path("None")), - TryEnum::Result => make - .expr_call( - make.expr_path(make.ident_path("Err")), - make.arg_list(iter::once( - match ctx.config.expr_fill_default { - ExprFillDefaultMode::Todo => make - .expr_macro( - make.ident_path("todo"), - make.token_tree( - syntax::SyntaxKind::L_PAREN, - [], - ), - ) - .into(), - ExprFillDefaultMode::Underscore => { - make.expr_underscore().into() - } - ExprFillDefaultMode::Default => make - .expr_macro( - make.ident_path("todo"), - make.token_tree( - syntax::SyntaxKind::L_PAREN, - [], - ), - ) - .into(), - }, - )), - ) - .into(), - })) - .indent(indent_level + 1) - .into(), - ) + let 
indent_level = IndentLevel::from_node(let_stmt.syntax()); + let new_let_stmt = make.let_else_stmt( + try_enum.happy_pattern(pat), + let_stmt.ty(), + expr, + make.block_expr( + iter::once( + make.expr_stmt( + make.expr_return(Some(match try_enum { + TryEnum::Option => make.expr_path(make.ident_path("None")), + TryEnum::Result => make + .expr_call( + make.expr_path(make.ident_path("Err")), + make.arg_list(iter::once( + match ctx.config.expr_fill_default { + ExprFillDefaultMode::Todo => make + .expr_macro( + make.ident_path("todo"), + make.token_tree( + syntax::SyntaxKind::L_PAREN, + [], + ), + ) + .into(), + ExprFillDefaultMode::Underscore => { + make.expr_underscore().into() + } + ExprFillDefaultMode::Default => make + .expr_macro( + make.ident_path("todo"), + make.token_tree( + syntax::SyntaxKind::L_PAREN, + [], + ), + ) + .into(), + }, + )), + ) + .into(), + })) + .indent(indent_level + 1) .into(), - ), - None, - ) - .indent(indent_level), - ); - editor.replace(let_stmt.syntax(), new_let_stmt.syntax()); - editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.vfs_file_id(), editor); - }, - ); - } + ) + .into(), + ), + None, + ) + .indent(indent_level), + ); + editor.replace(let_stmt.syntax(), new_let_stmt.syntax()); + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, + ); } Some(()) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs index 307414c79715a..66552dd65f567 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs @@ -272,16 +272,16 @@ impl Refs { .clone() .into_iter() .filter(|r| { - if let Definition::Trait(tr) = r.def { - if tr.items(ctx.db()).into_iter().any(|ai| { + if let Definition::Trait(tr) = r.def + && tr.items(ctx.db()).into_iter().any(|ai| { if let AssocItem::Function(f) = ai { def_is_referenced_in(Definition::Function(f), ctx) } else { false } - }) { - return true; - } + }) + { + return true; } def_is_referenced_in(r.def, ctx) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 00cbef1c01c01..890b8dd64126e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -175,10 +175,10 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let fn_def = format_function(ctx, module, &fun, old_indent).clone_for_update(); - if let Some(cap) = ctx.config.snippet_cap { - if let Some(name) = fn_def.name() { - builder.add_tabstop_before(cap, name); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(name) = fn_def.name() + { + builder.add_tabstop_before(cap, name); } let fn_def = match fun.self_param_adt(ctx) { @@ -289,10 +289,10 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option { let func = sema.to_def(&fn_)?; let mut ret_ty = func.ret_type(sema.db); - if func.is_async(sema.db) { - if let Some(async_ret) = func.async_ret_type(sema.db) { + if func.is_async(sema.db) + && let Some(async_ret) = func.async_ret_type(sema.db) { ret_ty = async_ret; } - } (fn_.const_token().is_some(), fn_.body().map(ast::Expr::BlockExpr), Some(ret_ty)) }, ast::Static(statik) => { @@ 
-1172,19 +1171,19 @@ impl GenericParent { /// Search `parent`'s ancestors for items with potentially applicable generic parameters fn generic_parents(parent: &SyntaxNode) -> Vec { let mut list = Vec::new(); - if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) { - if let ast::Item::Fn(ref fn_) = parent_item { - if let Some(parent_parent) = - parent_item.syntax().parent().and_then(|it| it.parent()).and_then(ast::Item::cast) - { - match parent_parent { - ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)), - ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)), - _ => (), - } + if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) + && let ast::Item::Fn(ref fn_) = parent_item + { + if let Some(parent_parent) = + parent_item.syntax().parent().and_then(|it| it.parent()).and_then(ast::Item::cast) + { + match parent_parent { + ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)), + ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)), + _ => (), } - list.push(GenericParent::Fn(fn_.clone())); } + list.push(GenericParent::Fn(fn_.clone())); } list } @@ -1337,10 +1336,10 @@ fn locals_defined_in_body( // see https://github.com/rust-lang/rust-analyzer/pull/7535#discussion_r570048550 let mut res = FxIndexSet::default(); body.walk_pat(&mut |pat| { - if let ast::Pat::IdentPat(pat) = pat { - if let Some(local) = sema.to_def(&pat) { - res.insert(local); - } + if let ast::Pat::IdentPat(pat) = pat + && let Some(local) = sema.to_def(&pat) + { + res.insert(local); } }); res @@ -1445,11 +1444,11 @@ fn impl_type_name(impl_node: &ast::Impl) -> Option { fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) { let parent_match_arm = body.parent().and_then(ast::MatchArm::cast); - if let Some(parent_match_arm) = parent_match_arm { - if parent_match_arm.comma_token().is_none() { - let parent_match_arm = builder.make_mut(parent_match_arm); - ted::append_child_raw(parent_match_arm.syntax(), make::token(T![,])); - } + if let Some(parent_match_arm) = parent_match_arm + && parent_match_arm.comma_token().is_none() + { + let parent_match_arm = builder.make_mut(parent_match_arm); + ted::append_child_raw(parent_match_arm.syntax(), make::token(T![,])); } } @@ -2120,30 +2119,30 @@ fn update_external_control_flow(handler: &FlowHandler<'_>, syntax: &SyntaxNode) _ => {} }, WalkEvent::Leave(e) => { - if nested_scope.is_none() { - if let Some(expr) = ast::Expr::cast(e.clone()) { - match expr { - ast::Expr::ReturnExpr(return_expr) => { - let expr = return_expr.expr(); - if let Some(replacement) = make_rewritten_flow(handler, expr) { - ted::replace(return_expr.syntax(), replacement.syntax()) - } - } - ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => { - let expr = break_expr.expr(); - if let Some(replacement) = make_rewritten_flow(handler, expr) { - ted::replace(break_expr.syntax(), replacement.syntax()) - } + if nested_scope.is_none() + && let Some(expr) = ast::Expr::cast(e.clone()) + { + match expr { + ast::Expr::ReturnExpr(return_expr) => { + let expr = return_expr.expr(); + if let Some(replacement) = make_rewritten_flow(handler, expr) { + ted::replace(return_expr.syntax(), replacement.syntax()) } - ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => { - if let Some(replacement) = make_rewritten_flow(handler, None) { - ted::replace(continue_expr.syntax(), replacement.syntax()) - } + } + ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => { + let expr = break_expr.expr(); + if let 
Some(replacement) = make_rewritten_flow(handler, expr) { + ted::replace(break_expr.syntax(), replacement.syntax()) } - _ => { - // do nothing + } + ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => { + if let Some(replacement) = make_rewritten_flow(handler, None) { + ted::replace(continue_expr.syntax(), replacement.syntax()) } } + _ => { + // do nothing + } } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs index b82b7984d4a45..c6a6b97df8245 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs @@ -69,13 +69,12 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let mut impl_parent: Option = None; let mut impl_child_count: usize = 0; - if let Some(parent_assoc_list) = node.parent() { - if let Some(parent_impl) = parent_assoc_list.parent() { - if let Some(impl_) = ast::Impl::cast(parent_impl) { - impl_child_count = parent_assoc_list.children().count(); - impl_parent = Some(impl_); - } - } + if let Some(parent_assoc_list) = node.parent() + && let Some(parent_impl) = parent_assoc_list.parent() + && let Some(impl_) = ast::Impl::cast(parent_impl) + { + impl_child_count = parent_assoc_list.children().count(); + impl_parent = Some(impl_); } let mut curr_parent_module: Option = None; @@ -436,10 +435,10 @@ impl Module { } }) .for_each(|(node, def)| { - if node_set.insert(node.to_string()) { - if let Some(import) = self.process_def_in_sel(def, &node, &module, ctx) { - check_intersection_and_push(&mut imports_to_remove, import); - } + if node_set.insert(node.to_string()) + && let Some(import) = self.process_def_in_sel(def, &node, &module, ctx) + { + check_intersection_and_push(&mut imports_to_remove, import); } }) } @@ -542,15 +541,16 @@ impl Module { import_path_to_be_removed = Some(text_range); } - if def_in_mod && def_out_sel { - if let Some(first_path_in_use_tree) = use_tree_str.last() { - let first_path_in_use_tree_str = first_path_in_use_tree.to_string(); - if !first_path_in_use_tree_str.contains("super") - && !first_path_in_use_tree_str.contains("crate") - { - let super_path = make::ext::ident_path("super"); - use_tree_str.push(super_path); - } + if def_in_mod + && def_out_sel + && let Some(first_path_in_use_tree) = use_tree_str.last() + { + let first_path_in_use_tree_str = first_path_in_use_tree.to_string(); + if !first_path_in_use_tree_str.contains("super") + && !first_path_in_use_tree_str.contains("crate") + { + let super_path = make::ext::ident_path("super"); + use_tree_str.push(super_path); } } @@ -563,12 +563,11 @@ impl Module { if let Some(mut use_tree_paths) = use_tree_paths { use_tree_paths.reverse(); - if uses_exist_out_sel || !uses_exist_in_sel || !def_in_mod || !def_out_sel { - if let Some(first_path_in_use_tree) = use_tree_paths.first() { - if first_path_in_use_tree.to_string().contains("super") { - use_tree_paths.insert(0, make::ext::ident_path("super")); - } - } + if (uses_exist_out_sel || !uses_exist_in_sel || !def_in_mod || !def_out_sel) + && let Some(first_path_in_use_tree) = use_tree_paths.first() + && first_path_in_use_tree.to_string().contains("super") + { + use_tree_paths.insert(0, make::ext::ident_path("super")); } let is_item = matches!( @@ -691,11 +690,9 @@ fn check_def_in_mod_and_out_sel( _ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id, }; - if have_same_parent { - 
if let ModuleSource::Module(module_) = source.value { - let in_sel = !selection_range.contains_range(module_.syntax().text_range()); - return (have_same_parent, in_sel); - } + if have_same_parent && let ModuleSource::Module(module_) = source.value { + let in_sel = !selection_range.contains_range(module_.syntax().text_range()); + return (have_same_parent, in_sel); } return (have_same_parent, false); @@ -772,12 +769,12 @@ fn get_use_tree_paths_from_path( .filter(|x| x.to_string() != path.to_string()) .filter_map(ast::UseTree::cast) .find_map(|use_tree| { - if let Some(upper_tree_path) = use_tree.path() { - if upper_tree_path.to_string() != path.to_string() { - use_tree_str.push(upper_tree_path.clone()); - get_use_tree_paths_from_path(upper_tree_path, use_tree_str); - return Some(use_tree); - } + if let Some(upper_tree_path) = use_tree.path() + && upper_tree_path.to_string() != path.to_string() + { + use_tree_str.push(upper_tree_path.clone()); + get_use_tree_paths_from_path(upper_tree_path, use_tree_str); + return Some(use_tree); } None })?; @@ -786,11 +783,11 @@ fn get_use_tree_paths_from_path( } fn add_change_vis(vis: Option, node_or_token_opt: Option) { - if vis.is_none() { - if let Some(node_or_token) = node_or_token_opt { - let pub_crate_vis = make::visibility_pub_crate().clone_for_update(); - ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax()); - } + if vis.is_none() + && let Some(node_or_token) = node_or_token_opt + { + let pub_crate_vis = make::visibility_pub_crate().clone_for_update(); + ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax()); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index 9095b1825f5fd..c56d0b3de5d6a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -215,12 +215,12 @@ fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, b ast::GenericParam::LifetimeParam(lt) if matches!(token.kind(), T![lifetime_ident]) => { - if let Some(lt) = lt.lifetime() { - if lt.text().as_str() == token.text() { - *tag = true; - tagged_one = true; - break; - } + if let Some(lt) = lt.lifetime() + && lt.text().as_str() == token.text() + { + *tag = true; + tagged_one = true; + break; } } param if matches!(token.kind(), T![ident]) => { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs index d843ac64567aa..79f22381952ae 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -72,10 +72,10 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> let ty_alias = make::ty_alias("Type", generic_params, None, None, Some((ty, None))) .clone_for_update(); - if let Some(cap) = ctx.config.snippet_cap { - if let Some(name) = ty_alias.name() { - edit.add_annotation(name.syntax(), builder.make_tabstop_before(cap)); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(name) = ty_alias.name() + { + edit.add_annotation(name.syntax(), builder.make_tabstop_before(cap)); } let indent = IndentLevel::from_node(node); @@ -111,17 +111,17 @@ fn collect_used_generics<'gp>( 
match ty { ast::Type::PathType(ty) => { if let Some(path) = ty.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { + if let Some(name_ref) = path.as_single_name_ref() + && let Some(param) = known_generics.iter().find(|gp| { match gp { ast::GenericParam::ConstParam(cp) => cp.name(), ast::GenericParam::TypeParam(tp) => tp.name(), _ => None, } .is_some_and(|n| n.text() == name_ref.text()) - }) { - generics.push(param); - } + }) + { + generics.push(param); } generics.extend( path.segments() @@ -160,20 +160,18 @@ fn collect_used_generics<'gp>( .and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), ), ast::Type::ArrayType(ar) => { - if let Some(ast::Expr::PathExpr(p)) = ar.const_arg().and_then(|x| x.expr()) { - if let Some(path) = p.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - if let ast::GenericParam::ConstParam(cp) = gp { - cp.name().is_some_and(|n| n.text() == name_ref.text()) - } else { - false - } - }) { - generics.push(param); - } + if let Some(ast::Expr::PathExpr(p)) = ar.const_arg().and_then(|x| x.expr()) + && let Some(path) = p.path() + && let Some(name_ref) = path.as_single_name_ref() + && let Some(param) = known_generics.iter().find(|gp| { + if let ast::GenericParam::ConstParam(cp) = gp { + cp.name().is_some_and(|n| n.text() == name_ref.text()) + } else { + false } - } + }) + { + generics.push(param); } } _ => (), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index db2d316d58ee3..c9c1969b9e023 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -404,11 +404,10 @@ impl Anchor { } if let Some(expr) = node.parent().and_then(ast::StmtList::cast).and_then(|it| it.tail_expr()) + && expr.syntax() == &node { - if expr.syntax() == &node { - cov_mark::hit!(test_extract_var_last_expr); - return Some(Anchor::Before(node)); - } + cov_mark::hit!(test_extract_var_last_expr); + return Some(Anchor::Before(node)); } if let Some(parent) = node.parent() { @@ -427,10 +426,10 @@ impl Anchor { } if let Some(stmt) = ast::Stmt::cast(node.clone()) { - if let ast::Stmt::ExprStmt(stmt) = stmt { - if stmt.expr().as_ref() == Some(to_extract) { - return Some(Anchor::Replace(stmt)); - } + if let ast::Stmt::ExprStmt(stmt) = stmt + && stmt.expr().as_ref() == Some(to_extract) + { + return Some(Anchor::Replace(stmt)); } return Some(Anchor::Before(node)); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 6063898076042..2c81e2883a34a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -2,9 +2,11 @@ use hir::{HasCrate, HasVisibility}; use ide_db::{FxHashSet, path_transform::PathTransform}; use syntax::{ ast::{ - self, AstNode, HasGenericParams, HasName, HasVisibility as _, edit_in_place::Indent, make, + self, AstNode, HasGenericParams, HasName, HasVisibility as _, + edit::{AstNodeEdit, IndentLevel}, + make, }, - ted, + syntax_editor::Position, }; use crate::{ @@ -165,54 +167,66 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: 
&AssistContext<' is_unsafe, is_gen, ) - .clone_for_update(); - - // Get the impl to update, or create one if we need to. - let impl_def = match impl_def { - Some(impl_def) => edit.make_mut(impl_def), + .indent(IndentLevel(1)); + let item = ast::AssocItem::Fn(f.clone()); + + let mut editor = edit.make_editor(strukt.syntax()); + let fn_: Option<ast::AssocItem> = match impl_def { + Some(impl_def) => match impl_def.assoc_item_list() { + Some(assoc_item_list) => { + let item = item.indent(IndentLevel::from_node(impl_def.syntax())); + assoc_item_list.add_items(&mut editor, vec![item.clone()]); + Some(item) + } + None => { + let assoc_item_list = make::assoc_item_list(Some(vec![item])); + editor.insert( + Position::last_child_of(impl_def.syntax()), + assoc_item_list.syntax(), + ); + assoc_item_list.assoc_items().next() + } + }, None => { let name = &strukt_name.to_string(); let ty_params = strukt.generic_param_list(); let ty_args = ty_params.as_ref().map(|it| it.to_generic_args()); let where_clause = strukt.where_clause(); + let assoc_item_list = make::assoc_item_list(Some(vec![item])); let impl_def = make::impl_( ty_params, ty_args, make::ty_path(make::ext::ident_path(name)), where_clause, - None, + Some(assoc_item_list), ) .clone_for_update(); // Fixup impl_def indentation let indent = strukt.indent_level(); - impl_def.reindent_to(indent); + let impl_def = impl_def.indent(indent); // Insert the impl block. let strukt = edit.make_mut(strukt.clone()); - ted::insert_all( - ted::Position::after(strukt.syntax()), + editor.insert_all( + Position::after(strukt.syntax()), vec![ make::tokens::whitespace(&format!("\n\n{indent}")).into(), impl_def.syntax().clone().into(), ], ); - - impl_def + impl_def.assoc_item_list().and_then(|list| list.assoc_items().next()) } }; - // Fixup function indentation.
- // FIXME: Should really be handled by `AssocItemList::add_item` - f.reindent_to(impl_def.indent_level() + 1); - - let assoc_items = impl_def.get_or_create_assoc_item_list(); - assoc_items.add_item(f.clone().into()); - - if let Some(cap) = ctx.config.snippet_cap { - edit.add_tabstop_before(cap, f) + if let Some(cap) = ctx.config.snippet_cap + && let Some(fn_) = fn_ + { + let tabstop = edit.make_tabstop_before(cap); + editor.add_annotation(fn_.syntax(), tabstop); } + edit.add_file_edits(ctx.vfs_file_id(), editor); }, )?; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs index d4d1b3490cb64..77232dfebdfe4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -148,11 +148,11 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option) -> Option) -> Option { /// `None` if function without a body; some bool to guess if function can panic fn can_panic(ast_func: &ast::Fn) -> Option { let body = ast_func.body()?.to_string(); - let can_panic = body.contains("panic!(") - // FIXME it would be better to not match `debug_assert*!` macro invocations - || body.contains("assert!(") - || body.contains(".unwrap()") - || body.contains(".expect("); - Some(can_panic) + let mut iter = body.chars(); + let assert_postfix = |s| { + ["!(", "_eq!(", "_ne!(", "_matches!("].iter().any(|postfix| str::starts_with(s, postfix)) + }; + + while !iter.as_str().is_empty() { + let s = iter.as_str(); + iter.next(); + if s.strip_prefix("debug_assert").is_some_and(assert_postfix) { + iter.nth(10); + continue; + } + if s.strip_prefix("assert").is_some_and(assert_postfix) + || s.starts_with("panic!(") + || s.starts_with(".unwrap()") + || s.starts_with(".expect(") + { + return Some(true); + } + } + + Some(false) } /// Helper function to get the name that should be given to `self` arguments @@ -677,6 +693,24 @@ pub fn panics_if(a: bool) { ); } + #[test] + fn guesses_debug_assert_macro_cannot_panic() { + check_assist( + generate_documentation_template, + r#" +pub fn $0debug_panics_if_not(a: bool) { + debug_assert!(a == true); +} +"#, + r#" +/// . +pub fn debug_panics_if_not(a: bool) { + debug_assert!(a == true); +} +"#, + ); + } + #[test] fn guesses_assert_macro_can_panic() { check_assist( @@ -699,6 +733,28 @@ pub fn panics_if_not(a: bool) { ); } + #[test] + fn guesses_assert_eq_macro_can_panic() { + check_assist( + generate_documentation_template, + r#" +pub fn $0panics_if_not(a: bool) { + assert_eq!(a, true); +} +"#, + r#" +/// . +/// +/// # Panics +/// +/// Panics if . 
+pub fn panics_if_not(a: bool) { + assert_eq!(a, true); +} +"#, + ); + } + #[test] fn guesses_unwrap_can_panic() { check_assist( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs index b63baa696d9ae..3c327a63b0f0b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs @@ -111,10 +111,10 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ], ); - if let Some(cap) = ctx.config.snippet_cap { - if let Some(name) = ty_alias.name() { - edit.add_annotation(name.syntax(), builder.make_placeholder_snippet(cap)); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(name) = ty_alias.name() + { + edit.add_annotation(name.syntax(), builder.make_placeholder_snippet(cap)); } builder.add_file_edits(ctx.vfs_file_id(), edit); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 3290a70e1c69c..613b32fcc1653 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -70,10 +70,10 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let TargetInfo { target_module, adt_info, target, file } = fn_target_info(ctx, path, &call, fn_name)?; - if let Some(m) = target_module { - if !is_editable_crate(m.krate(), ctx.db()) { - return None; - } + if let Some(m) = target_module + && !is_editable_crate(m.krate(), ctx.db()) + { + return None; } let function_builder = diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 20ee9253d379c..807b9194b2df7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -433,12 +433,11 @@ fn build_source_change( new_fn.indent(1.into()); // Insert a tabstop only for last method we generate - if i == record_fields_count - 1 { - if let Some(cap) = ctx.config.snippet_cap { - if let Some(name) = new_fn.name() { - builder.add_tabstop_before(cap, name); - } - } + if i == record_fields_count - 1 + && let Some(cap) = ctx.config.snippet_cap + && let Some(name) = new_fn.name() + { + builder.add_tabstop_before(cap, name); } assoc_item_list.add_item(new_fn.clone().into()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 31cadcf5ea86b..b38ee6f7dce8e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -58,11 +58,11 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio let mut editor = edit.make_editor(nominal.syntax()); // Add a tabstop after the left curly brace - if let Some(cap) = ctx.config.snippet_cap { - if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) { - let tabstop = edit.make_tabstop_after(cap); - editor.add_annotation(l_curly, tabstop); - } + if let Some(cap) = ctx.config.snippet_cap + && 
let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) + { + let tabstop = edit.make_tabstop_after(cap); + editor.add_annotation(l_curly, tabstop); } insert_impl(&mut editor, &impl_, &nominal); @@ -201,7 +201,6 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> &impl_, &target_scope, ); - let assoc_items = assoc_items.into_iter().map(either::Either::Right).collect(); let assoc_item_list = make::assoc_item_list(Some(assoc_items)); make_impl_(Some(assoc_item_list)) }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index 9c4bcdd403042..ae1ae24d1ec1c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -104,7 +104,14 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> format!("Generate `{trait_new}` impl from this `{trait_name}` trait"), target, |edit| { - edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}")); + edit.insert( + target.start(), + if ctx.config.snippet_cap.is_some() { + format!("$0{impl_def}\n\n{indent}") + } else { + format!("{impl_def}\n\n{indent}") + }, + ); }, ) } @@ -161,7 +168,10 @@ fn process_ret_type(ref_ty: &ast::RetType) -> Option { #[cfg(test)] mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; + use crate::{ + AssistConfig, + tests::{TEST_CONFIG, check_assist, check_assist_not_applicable, check_assist_with_config}, + }; use super::*; @@ -402,6 +412,43 @@ impl Index$0 for [T; 3] {} pub trait AsRef {} impl AsRef$0 for [T; 3] {} +"#, + ); + } + + #[test] + fn no_snippets() { + check_assist_with_config( + generate_mut_trait_impl, + AssistConfig { snippet_cap: None, ..TEST_CONFIG }, + r#" +//- minicore: index +pub enum Axis { X = 0, Y = 1, Z = 2 } + +impl core::ops::Index$0 for [T; 3] { + type Output = T; + + fn index(&self, index: Axis) -> &Self::Output { + &self[index as usize] + } +} +"#, + r#" +pub enum Axis { X = 0, Y = 1, Z = 2 } + +impl core::ops::IndexMut for [T; 3] { + fn index_mut(&mut self, index: Axis) -> &mut Self::Output { + &mut self[index as usize] + } +} + +impl core::ops::Index for [T; 3] { + type Output = T; + + fn index(&self, index: Axis) -> &Self::Output { + &self[index as usize] + } +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 5bda1226cda36..351f134612f00 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -168,7 +168,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option ); fn_.syntax().clone() } else { - let items = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let items = vec![ast::AssocItem::Fn(fn_)]; let list = make::assoc_item_list(Some(items)); editor.insert(Position::after(impl_def.syntax()), list.syntax()); list.syntax().clone() @@ -176,7 +176,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option } else { // Generate a new impl to add the method to let indent_level = strukt.indent_level(); - let body = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let body = vec![ast::AssocItem::Fn(fn_)]; let list = 
make::assoc_item_list(Some(body)); let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list)); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 92a4bd35b3e78..56500cf068024 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -2,12 +2,8 @@ use crate::assist_context::{AssistContext, Assists}; use ide_db::assists::AssistId; use syntax::{ AstNode, SyntaxKind, T, - ast::{ - self, HasGenericParams, HasName, HasVisibility, - edit_in_place::{HasVisibilityEdit, Indent}, - make, - }, - ted::{self, Position}, + ast::{self, HasGenericParams, HasName, HasVisibility, edit_in_place::Indent, make}, + syntax_editor::{Position, SyntaxEditor}, }; // NOTES : @@ -88,8 +84,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ return None; } - let assoc_items = impl_ast.assoc_item_list()?; - let first_element = assoc_items.assoc_items().next(); + let impl_assoc_items = impl_ast.assoc_item_list()?; + let first_element = impl_assoc_items.assoc_items().next(); first_element.as_ref()?; let impl_name = impl_ast.self_ty()?; @@ -99,20 +95,16 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ "Generate trait from impl", impl_ast.syntax().text_range(), |builder| { - let impl_ast = builder.make_mut(impl_ast); - let trait_items = assoc_items.clone_for_update(); - let impl_items = builder.make_mut(assoc_items); - let impl_name = builder.make_mut(impl_name); - - trait_items.assoc_items().for_each(|item| { - strip_body(&item); - remove_items_visibility(&item); - }); - - impl_items.assoc_items().for_each(|item| { - remove_items_visibility(&item); - }); - + let trait_items: ast::AssocItemList = { + let trait_items = impl_assoc_items.clone_subtree(); + let mut trait_items_editor = SyntaxEditor::new(trait_items.syntax().clone()); + + trait_items.assoc_items().for_each(|item| { + strip_body(&mut trait_items_editor, &item); + remove_items_visibility(&mut trait_items_editor, &item); + }); + ast::AssocItemList::cast(trait_items_editor.finish().new_root().clone()).unwrap() + }; let trait_ast = make::trait_( false, "NewTrait", @@ -130,6 +122,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ trait_name_ref.syntax().clone().into(), make::tokens::single_space().into(), make::token(T![for]).into(), + make::tokens::single_space().into(), ]; if let Some(params) = impl_ast.generic_param_list() { @@ -137,10 +130,15 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ elements.insert(1, gen_args.syntax().clone().into()); } - ted::insert_all(Position::before(impl_name.syntax()), elements); + let mut editor = builder.make_editor(impl_ast.syntax()); + impl_assoc_items.assoc_items().for_each(|item| { + remove_items_visibility(&mut editor, &item); + }); + + editor.insert_all(Position::before(impl_name.syntax()), elements); // Insert trait before TraitImpl - ted::insert_all_raw( + editor.insert_all( Position::before(impl_ast.syntax()), vec![ trait_ast.syntax().clone().into(), @@ -150,11 +148,12 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ // Link the trait name & trait ref names together as a placeholder snippet group if let Some(cap) = ctx.config.snippet_cap { - 
builder.add_placeholder_snippet_group( - cap, - vec![trait_name.syntax().clone(), trait_name_ref.syntax().clone()], - ); + let placeholder = builder.make_placeholder_snippet(cap); + editor.add_annotation(trait_name.syntax(), placeholder); + editor.add_annotation(trait_name_ref.syntax(), placeholder); } + + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ); @@ -162,31 +161,33 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ } /// `E0449` Trait items always share the visibility of their trait -fn remove_items_visibility(item: &ast::AssocItem) { +fn remove_items_visibility(editor: &mut SyntaxEditor, item: &ast::AssocItem) { if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { if let Some(vis) = has_vis.visibility() && let Some(token) = vis.syntax().next_sibling_or_token() && token.kind() == SyntaxKind::WHITESPACE { - ted::remove(token); + editor.delete(token); + } + if let Some(vis) = has_vis.visibility() { + editor.delete(vis.syntax()); } - has_vis.set_visibility(None); } } -fn strip_body(item: &ast::AssocItem) { - if let ast::AssocItem::Fn(f) = item { - if let Some(body) = f.body() { - // In contrast to function bodies, we want to see no ws before a semicolon. - // So let's remove them if we see any. - if let Some(prev) = body.syntax().prev_sibling_or_token() { - if prev.kind() == SyntaxKind::WHITESPACE { - ted::remove(prev); - } - } - - ted::replace(body.syntax(), make::tokens::semicolon()); +fn strip_body(editor: &mut SyntaxEditor, item: &ast::AssocItem) { + if let ast::AssocItem::Fn(f) = item + && let Some(body) = f.body() + { + // In contrast to function bodies, we want to see no ws before a semicolon. + // So let's remove them if we see any. + if let Some(prev) = body.syntax().prev_sibling_or_token() + && prev.kind() == SyntaxKind::WHITESPACE + { + editor.delete(prev); } + + editor.replace(body.syntax(), make::tokens::semicolon()); }; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index 1549b414dcc18..5367350052cbe 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -393,19 +393,17 @@ fn inline( // `FileReference` incorrect if let Some(imp) = sema.ancestors_with_macros(fn_body.syntax().clone()).find_map(ast::Impl::cast) + && !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) + && let Some(t) = imp.self_ty() { - if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) { - if let Some(t) = imp.self_ty() { - while let Some(self_tok) = body - .syntax() - .descendants_with_tokens() - .filter_map(NodeOrToken::into_token) - .find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) - { - let replace_with = t.clone_subtree().syntax().clone_for_update(); - ted::replace(self_tok, replace_with); - } - } + while let Some(self_tok) = body + .syntax() + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) + { + let replace_with = t.clone_subtree().syntax().clone_for_update(); + ted::replace(self_tok, replace_with); } } @@ -415,10 +413,10 @@ fn inline( for stmt in fn_body.statements() { if let Some(let_stmt) = ast::LetStmt::cast(stmt.syntax().to_owned()) { for has_token in let_stmt.syntax().children_with_tokens() { - if let Some(node) = has_token.as_node() { - if let Some(ident_pat) = ast::IdentPat::cast(node.to_owned()) { - 
func_let_vars.insert(ident_pat.syntax().text().to_string()); - } + if let Some(node) = has_token.as_node() + && let Some(ident_pat) = ast::IdentPat::cast(node.to_owned()) + { + func_let_vars.insert(ident_pat.syntax().text().to_string()); } } } @@ -534,16 +532,15 @@ fn inline( } } - if let Some(generic_arg_list) = generic_arg_list.clone() { - if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) - { - body.reindent_to(IndentLevel(0)); - if let Some(new_body) = ast::BlockExpr::cast( - PathTransform::function_call(target, source, function, generic_arg_list) - .apply(body.syntax()), - ) { - body = new_body; - } + if let Some(generic_arg_list) = generic_arg_list.clone() + && let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) + { + body.reindent_to(IndentLevel(0)); + if let Some(new_body) = ast::BlockExpr::cast( + PathTransform::function_call(target, source, function, generic_arg_list) + .apply(body.syntax()), + ) { + body = new_body; } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs index 4511072b041b1..ae8d130df23ca 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -9,10 +9,11 @@ use ide_db::{ search::FileReference, }; use itertools::Itertools; +use syntax::ast::syntax_factory::SyntaxFactory; +use syntax::syntax_editor::SyntaxEditor; use syntax::{ AstNode, NodeOrToken, SyntaxNode, - ast::{self, HasGenericParams, HasName, make}, - ted, + ast::{self, HasGenericParams, HasName}, }; use crate::{ @@ -68,37 +69,41 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) let mut definition_deleted = false; let mut inline_refs_for_file = |file_id, refs: Vec| { - builder.edit_file(file_id); + let source = ctx.sema.parse(file_id); + let mut editor = builder.make_editor(source.syntax()); let (path_types, path_type_uses) = split_refs_and_uses(builder, refs, |path_type| { path_type.syntax().ancestors().nth(3).and_then(ast::PathType::cast) }); - path_type_uses .iter() .flat_map(ast_to_remove_for_path_in_use_stmt) - .for_each(|x| builder.delete(x.syntax().text_range())); + .for_each(|x| editor.delete(x.syntax())); + for (target, replacement) in path_types.into_iter().filter_map(|path_type| { - let replacement = inline(&ast_alias, &path_type)?.to_text(&concrete_type); - let target = path_type.syntax().text_range(); + let replacement = + inline(&ast_alias, &path_type)?.replace_generic(&concrete_type); + let target = path_type.syntax().clone(); Some((target, replacement)) }) { - builder.replace(target, replacement); + editor.replace(target, replacement); } - if file_id == ctx.vfs_file_id() { - builder.delete(ast_alias.syntax().text_range()); + if file_id.file_id(ctx.db()) == ctx.vfs_file_id() { + editor.delete(ast_alias.syntax()); definition_deleted = true; } + builder.add_file_edits(file_id.file_id(ctx.db()), editor); }; for (file_id, refs) in usages.into_iter() { - inline_refs_for_file(file_id.file_id(ctx.db()), refs); + inline_refs_for_file(file_id, refs); } if !definition_deleted { - builder.edit_file(ctx.vfs_file_id()); - builder.delete(ast_alias.syntax().text_range()); + let mut editor = builder.make_editor(ast_alias.syntax()); + editor.delete(ast_alias.syntax()); + builder.add_file_edits(ctx.vfs_file_id(), editor) } }, ) @@ -146,23 +151,26 @@ pub(crate) fn 
inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O } } - let target = alias_instance.syntax().text_range(); - acc.add( AssistId::refactor_inline("inline_type_alias"), "Inline type alias", - target, - |builder| builder.replace(target, replacement.to_text(&concrete_type)), + alias_instance.syntax().text_range(), + |builder| { + let mut editor = builder.make_editor(alias_instance.syntax()); + let replace = replacement.replace_generic(&concrete_type); + editor.replace(alias_instance.syntax(), replace); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, ) } impl Replacement { - fn to_text(&self, concrete_type: &ast::Type) -> String { + fn replace_generic(&self, concrete_type: &ast::Type) -> SyntaxNode { match self { Replacement::Generic { lifetime_map, const_and_type_map } => { create_replacement(lifetime_map, const_and_type_map, concrete_type) } - Replacement::Plain => concrete_type.to_string(), + Replacement::Plain => concrete_type.syntax().clone_subtree().clone_for_update(), } } } @@ -199,8 +207,8 @@ impl LifetimeMap { alias_generics: &ast::GenericParamList, ) -> Option { let mut inner = FxHashMap::default(); - - let wildcard_lifetime = make::lifetime("'_"); + let make = SyntaxFactory::without_mappings(); + let wildcard_lifetime = make.lifetime("'_"); let lifetimes = alias_generics .lifetime_params() .filter_map(|lp| lp.lifetime()) @@ -299,15 +307,14 @@ fn create_replacement( lifetime_map: &LifetimeMap, const_and_type_map: &ConstAndTypeMap, concrete_type: &ast::Type, -) -> String { - let updated_concrete_type = concrete_type.clone_for_update(); - let mut replacements = Vec::new(); - let mut removals = Vec::new(); +) -> SyntaxNode { + let updated_concrete_type = concrete_type.syntax().clone_subtree(); + let mut editor = SyntaxEditor::new(updated_concrete_type.clone()); - for syntax in updated_concrete_type.syntax().descendants() { - let syntax_string = syntax.to_string(); - let syntax_str = syntax_string.as_str(); + let mut replacements: Vec<(SyntaxNode, SyntaxNode)> = Vec::new(); + let mut removals: Vec> = Vec::new(); + for syntax in updated_concrete_type.descendants() { if let Some(old_lifetime) = ast::Lifetime::cast(syntax.clone()) { if let Some(new_lifetime) = lifetime_map.0.get(&old_lifetime.to_string()) { if new_lifetime.text() == "'_" { @@ -322,12 +329,16 @@ fn create_replacement( replacements.push((syntax.clone(), new_lifetime.syntax().clone_for_update())); } - } else if let Some(replacement_syntax) = const_and_type_map.0.get(syntax_str) { + } else if let Some(name_ref) = ast::NameRef::cast(syntax.clone()) { + let Some(replacement_syntax) = const_and_type_map.0.get(&name_ref.to_string()) else { + continue; + }; let new_string = replacement_syntax.to_string(); let new = if new_string == "_" { - make::wildcard_pat().syntax().clone_for_update() + let make = SyntaxFactory::without_mappings(); + make.wildcard_pat().syntax().clone() } else { - replacement_syntax.clone_for_update() + replacement_syntax.clone() }; replacements.push((syntax.clone(), new)); @@ -335,14 +346,13 @@ fn create_replacement( } for (old, new) in replacements { - ted::replace(old, new); + editor.replace(old, new); } for syntax in removals { - ted::remove(syntax); + editor.delete(syntax); } - - updated_concrete_type.to_string() + editor.finish().new_root().clone() } fn get_type_alias(ctx: &AssistContext<'_>, path: &ast::PathType) -> Option { @@ -377,12 +387,15 @@ impl ConstOrTypeGeneric { } fn replacement_value(&self) -> Option { - Some(match self { - ConstOrTypeGeneric::ConstArg(ca) => 
ca.expr()?.syntax().clone(), - ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(), - ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(), - ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(), - }) + Some( + match self { + ConstOrTypeGeneric::ConstArg(ca) => ca.expr()?.syntax().clone(), + ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(), + ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(), + ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(), + } + .clone_for_update(), + ) } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs index 0c1dc9eb9349f..a645c8b90afc4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -43,10 +43,10 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> let db = ctx.db(); let const_: ast::Const = ctx.find_node_at_offset()?; // Don't show the assist when the cursor is at the const's body. - if let Some(body) = const_.body() { - if body.syntax().text_range().contains(ctx.offset()) { - return None; - } + if let Some(body) = const_.body() + && body.syntax().text_range().contains(ctx.offset()) + { + return None; } let parent_fn = const_.syntax().ancestors().find_map(ast::Fn::cast)?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs index 1b0c313935376..21debf6745a67 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs @@ -62,10 +62,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> return None; }; - if let Some(parent) = tgt.syntax().parent() { - if matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT) { - return None; - } + if let Some(parent) = tgt.syntax().parent() + && matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT) + { + return None; } let target = tgt.syntax().text_range(); @@ -90,10 +90,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> let mut editor = SyntaxEditor::new(edit_tgt); for (stmt, rhs) in assignments { let mut stmt = stmt.syntax().clone(); - if let Some(parent) = stmt.parent() { - if ast::ExprStmt::cast(parent.clone()).is_some() { - stmt = parent.clone(); - } + if let Some(parent) = stmt.parent() + && ast::ExprStmt::cast(parent.clone()).is_some() + { + stmt = parent.clone(); } editor.replace(stmt, rhs.syntax()); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs index 94b49c5df0915..2cbb24a64fd5a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs @@ -80,15 +80,15 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> O // parse inside string to escape `"` let escaped = value.escape_default().to_string(); let suffix = string_suffix(token.text()).unwrap_or_default(); - if let Some(offsets) = token.quote_offsets() { - if token.text()[offsets.contents - 
token.syntax().text_range().start()] == escaped { - let end_quote = offsets.quotes.1; - let end_quote = - TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix)); - edit.replace(offsets.quotes.0, "\""); - edit.replace(end_quote, "\""); - return; - } + if let Some(offsets) = token.quote_offsets() + && token.text()[offsets.contents - token.syntax().text_range().start()] == escaped + { + let end_quote = offsets.quotes.1; + let end_quote = + TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix)); + edit.replace(offsets.quotes.0, "\""); + edit.replace(end_quote, "\""); + return; } edit.replace(token.syntax().text_range(), format!("\"{escaped}\"{suffix}")); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 45bb6ce9129cb..175f261317058 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -221,11 +221,7 @@ fn impl_def_from_trait( } else { Some(first.clone()) }; - let items = first_item - .into_iter() - .chain(other.iter().cloned()) - .map(either::Either::Right) - .collect(); + let items = first_item.into_iter().chain(other.iter().cloned()).collect(); make::assoc_item_list(Some(items)) } else { make::assoc_item_list(None) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index fa005a411d361..9f742131e5cb4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -102,10 +102,10 @@ pub(crate) fn replace_qualified_name_with_use( fn drop_generic_args(path: &ast::Path) -> ast::Path { let path = path.clone_for_update(); - if let Some(segment) = path.segment() { - if let Some(generic_args) = segment.generic_arg_list() { - ted::remove(generic_args.syntax()); - } + if let Some(segment) = path.segment() + && let Some(generic_args) = segment.generic_arg_list() + { + ted::remove(generic_args.syntax()); } path } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs index ac10a829bbf1b..b9385775b4765 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -41,10 +41,10 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O return None; } // Do nothing if the method is a member of trait. - if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) { - if impl_.trait_().is_some() { - return None; - } + if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) + && impl_.trait_().is_some() + { + return None; } // Remove the `async` keyword plus whitespace after it, if any. 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs index cf38262fbf443..eea6c85e8df0a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs @@ -72,20 +72,20 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> let mut exprs_to_unwrap = Vec::new(); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e); walk_expr(&body_expr, &mut |expr| { - if let ast::Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let ast::Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&body_expr, tail_cb); let is_unit_type = is_unit_type(&happy_type); if is_unit_type { - if let Some(NodeOrToken::Token(token)) = ret_type.syntax().next_sibling_or_token() { - if token.kind() == SyntaxKind::WHITESPACE { - editor.delete(token); - } + if let Some(NodeOrToken::Token(token)) = ret_type.syntax().next_sibling_or_token() + && token.kind() == SyntaxKind::WHITESPACE + { + editor.delete(token); } editor.delete(ret_type.syntax()); @@ -162,10 +162,10 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> } } - if let Some(cap) = ctx.config.snippet_cap { - if let Some(final_placeholder) = final_placeholder { - editor.add_annotation(final_placeholder.syntax(), builder.make_tabstop_after(cap)); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(final_placeholder) = final_placeholder + { + editor.add_annotation(final_placeholder.syntax(), builder.make_tabstop_after(cap)); } editor.add_mappings(make.finish_with_mappings()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs index ecfecbb04ff22..46f3e85e12346 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs @@ -47,10 +47,10 @@ pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option if tuple_pat.fields().count() != tuple_init.fields().count() { return None; } - if let Some(tys) = &tuple_ty { - if tuple_pat.fields().count() != tys.fields().count() { - return None; - } + if let Some(tys) = &tuple_ty + && tuple_pat.fields().count() != tys.fields().count() + { + return None; } let parent = let_kw.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs index d7189aa5dbbde..0f089c9b66eb0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs @@ -101,24 +101,24 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let mut exprs_to_wrap = Vec::new(); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); walk_expr(&body_expr, &mut |expr| { - if let Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + 
for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&body_expr, tail_cb); for ret_expr_arg in exprs_to_wrap { - if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg) { - if ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty) { - // The type is already correct, don't wrap it. - // We deliberately don't use `could_unify_with_deeply()`, because as long as the outer - // enum matches it's okay for us, as we don't trigger the assist if the return type - // is already `Option`/`Result`, so mismatched exact type is more likely a mistake - // than something intended. - continue; - } + if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg) + && ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty) + { + // The type is already correct, don't wrap it. + // We deliberately don't use `could_unify_with_deeply()`, because as long as the outer + // enum matches it's okay for us, as we don't trigger the assist if the return type + // is already `Option`/`Result`, so mismatched exact type is more likely a mistake + // than something intended. + continue; } let happy_wrapped = make.expr_call( @@ -147,13 +147,13 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ast::GenericArg::LifetimeArg(_) => false, _ => true, }); - if let Some(error_type_arg) = error_type_arg { - if let Some(cap) = ctx.config.snippet_cap { - editor.add_annotation( - error_type_arg.syntax(), - builder.make_placeholder_snippet(cap), - ); - } + if let Some(error_type_arg) = error_type_arg + && let Some(cap) = ctx.config.snippet_cap + { + editor.add_annotation( + error_type_arg.syntax(), + builder.make_placeholder_snippet(cap), + ); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs index 5183566d136b5..7d5740b748bef 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs @@ -200,13 +200,12 @@ fn wrap_derive( ], ); - if let Some(snippet_cap) = ctx.config.snippet_cap { - if let Some(first_meta) = + if let Some(snippet_cap) = ctx.config.snippet_cap + && let Some(first_meta) = cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token()) - { - let tabstop = edit.make_tabstop_after(snippet_cap); - editor.add_annotation(first_meta, tabstop); - } + { + let tabstop = edit.make_tabstop_after(snippet_cap); + editor.add_annotation(first_meta, tabstop); } editor.add_mappings(make.finish_with_mappings()); @@ -256,13 +255,12 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) -> editor.replace(attr.syntax(), cfg_attr.syntax()); - if let Some(snippet_cap) = ctx.config.snippet_cap { - if let Some(first_meta) = + if let Some(snippet_cap) = ctx.config.snippet_cap + && let Some(first_meta) = cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token()) - { - let tabstop = edit.make_tabstop_after(snippet_cap); - editor.add_annotation(first_meta, tabstop); - } + { + let tabstop = edit.make_tabstop_after(snippet_cap); + editor.add_annotation(first_meta, tabstop); } editor.add_mappings(make.finish_with_mappings()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 15c7a6a3fc266..91aac9cf7b608 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -23,12 +23,11 @@ use syntax::{ ast::{ self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, edit::{AstNodeEdit, IndentLevel}, - edit_in_place::{AttrsOwnerEdit, Removable}, + edit_in_place::AttrsOwnerEdit, make, syntax_factory::SyntaxFactory, }, - syntax_editor::SyntaxEditor, - ted, + syntax_editor::{Removable, SyntaxEditor}, }; use crate::{ @@ -131,10 +130,10 @@ pub fn filter_assoc_items( if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent && assoc_item.attrs(sema.db).has_doc_hidden() { - if let hir::AssocItem::Function(f) = assoc_item { - if !f.has_body(sema.db) { - return true; - } + if let hir::AssocItem::Function(f) = assoc_item + && !f.has_body(sema.db) + { + return true; } return false; } @@ -207,7 +206,7 @@ pub fn add_trait_assoc_items_to_impl( stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`"); } } - original_item.clone_for_update() + original_item } .reset_indent(); @@ -221,31 +220,37 @@ pub fn add_trait_assoc_items_to_impl( cloned_item.remove_attrs_and_docs(); cloned_item }) - .map(|item| { - match &item { - ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { - let body = AstNodeEdit::indent( - &make::block_expr( - None, - Some(match config.expr_fill_default { - ExprFillDefaultMode::Todo => make::ext::expr_todo(), - ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), - ExprFillDefaultMode::Default => make::ext::expr_todo(), - }), - ), - IndentLevel::single(), - ); - ted::replace(fn_.get_or_create_body().syntax(), body.syntax()); - } - ast::AssocItem::TypeAlias(type_alias) => { - if let Some(type_bound_list) = type_alias.type_bound_list() { - type_bound_list.remove() - } + .filter_map(|item| match item { + ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { + let fn_ = fn_.clone_subtree(); + let new_body = &make::block_expr( + None, + Some(match config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }), + ); + let new_body = AstNodeEdit::indent(new_body, IndentLevel::single()); + let mut fn_editor = SyntaxEditor::new(fn_.syntax().clone()); + fn_.replace_or_insert_body(&mut fn_editor, new_body); + let new_fn_ = fn_editor.finish().new_root().clone(); + ast::AssocItem::cast(new_fn_) + } + ast::AssocItem::TypeAlias(type_alias) => { + let type_alias = type_alias.clone_subtree(); + if let Some(type_bound_list) = type_alias.type_bound_list() { + let mut type_alias_editor = SyntaxEditor::new(type_alias.syntax().clone()); + type_bound_list.remove(&mut type_alias_editor); + let type_alias = type_alias_editor.finish().new_root().clone(); + ast::AssocItem::cast(type_alias) + } else { + Some(ast::AssocItem::TypeAlias(type_alias)) } - _ => {} } - AstNodeEdit::indent(&item, new_indent_level) + item => Some(item), }) + .map(|item| AstNodeEdit::indent(&item, new_indent_level)) .collect() } @@ -514,10 +519,10 @@ pub(crate) fn find_struct_impl( if !(same_ty && not_trait_impl) { None } else { Some(impl_blk) } }); - if let Some(ref impl_blk) = block { - if has_any_fn(impl_blk, names) { - return None; - } + if let Some(ref impl_blk) = block + && has_any_fn(impl_blk, names) + { + return None; } Some(block) @@ -526,12 +531,11 @@ pub(crate) fn find_struct_impl( fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool { if let Some(il) = imp.assoc_item_list() { for item in il.assoc_items() { - if let ast::AssocItem::Fn(f) 
= item { - if let Some(name) = f.name() { - if names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) { - return true; - } - } + if let ast::AssocItem::Fn(f) = item + && let Some(name) = f.name() + && names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) + { + return true; } } } @@ -1021,12 +1025,12 @@ pub(crate) fn trimmed_text_range(source_file: &SourceFile, initial_range: TextRa pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgList { let mut args = vec![]; for param in list.params() { - if let Some(ast::Pat::IdentPat(pat)) = param.pat() { - if let Some(name) = pat.name() { - let name = name.to_string(); - let expr = make::expr_path(make::ext::ident_path(&name)); - args.push(expr); - } + if let Some(ast::Pat::IdentPat(pat)) = param.pat() + && let Some(name) = pat.name() + { + let name = name.to_string(); + let expr = make::expr_path(make::ext::ident_path(&name)); + args.push(expr); } } make::arg_list(args) @@ -1138,12 +1142,11 @@ pub fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bo }; match expr { ast::Expr::CallExpr(call) => { - if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() { - if let Some(PathResolution::Def(ModuleDef::Function(func))) = + if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() + && let Some(PathResolution::Def(ModuleDef::Function(func))) = path_expr.path().and_then(|path| sema.resolve_path(&path)) - { - is_const &= func.is_const(sema.db); - } + { + is_const &= func.is_const(sema.db); } } ast::Expr::MethodCallExpr(call) => { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 65072d936f635..11d26228ba201 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -111,10 +111,11 @@ impl Completions { ctx: &CompletionContext<'_>, super_chain_len: Option, ) { - if let Some(len) = super_chain_len { - if len > 0 && len < ctx.depth_from_crate_root { - self.add_keyword(ctx, "super::"); - } + if let Some(len) = super_chain_len + && len > 0 + && len < ctx.depth_from_crate_root + { + self.add_keyword(ctx, "super::"); } } @@ -643,10 +644,10 @@ fn enum_variants_with_paths( let variants = enum_.variants(ctx.db); - if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { - if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) { - variants.iter().for_each(|variant| process_variant(*variant)); - } + if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) + && impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) + { + variants.iter().for_each(|variant| process_variant(*variant)); } for variant in variants { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index 5340d65a142dd..f75123324f377 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -258,12 +258,11 @@ fn complete_methods( fn on_trait_method(&mut self, func: hir::Function) -> ControlFlow<()> { // This needs to come before the `seen_methods` test, so that if we see the same method twice, // once as inherent and once not, we will include it. 
- if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) { - if self.ctx.exclude_traits.contains(&trait_) - || trait_.complete(self.ctx.db) == Complete::IgnoreMethods - { - return ControlFlow::Continue(()); - } + if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) + && (self.ctx.exclude_traits.contains(&trait_) + || trait_.complete(self.ctx.db) == Complete::IgnoreMethods) + { + return ControlFlow::Continue(()); } if func.self_param(self.ctx.db).is_some() diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs index 809e71cc119e0..fb78386976d61 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs @@ -128,10 +128,10 @@ fn params_from_stmt_list_scope( { let module = scope.module().into(); scope.process_all_names(&mut |name, def| { - if let hir::ScopeDef::Local(local) = def { - if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module, true) { - cb(name, ty); - } + if let hir::ScopeDef::Local(local) = def + && let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module, true) + { + cb(name, ty); } }); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index bcf8c0ec527af..cdd77e79b5cd7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -228,24 +228,22 @@ fn add_function_impl_( .set_documentation(func.docs(ctx.db)) .set_relevance(CompletionRelevance { exact_name_match: true, ..Default::default() }); - if let Some(source) = ctx.sema.source(func) { - if let Some(transformed_fn) = + if let Some(source) = ctx.sema.source(func) + && let Some(transformed_fn) = get_transformed_fn(ctx, source.value, impl_def, async_sugaring) - { - let function_decl = - function_declaration(ctx, &transformed_fn, source.file_id.macro_file()); - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = format!("{function_decl} {{\n $0\n}}"); - item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); - } - None => { - let header = format!("{function_decl} {{"); - item.text_edit(TextEdit::replace(replacement_range, header)); - } - }; - item.add_to(acc, ctx.db); - } + { + let function_decl = function_declaration(ctx, &transformed_fn, source.file_id.macro_file()); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = format!("{function_decl} {{\n $0\n}}"); + item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); + } + None => { + let header = format!("{function_decl} {{"); + item.text_edit(TextEdit::replace(replacement_range, header)); + } + }; + item.add_to(acc, ctx.db); } } @@ -447,36 +445,36 @@ fn add_const_impl( ) { let const_name = const_.name(ctx.db).map(|n| n.display_no_db(ctx.edition).to_smolstr()); - if let Some(const_name) = const_name { - if let Some(source) = ctx.sema.source(const_) { - let assoc_item = ast::AssocItem::Const(source.value); - if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) { - let transformed_const = match transformed_item { - ast::AssocItem::Const(const_) => const_, - _ => unreachable!(), - }; - - let label = - make_const_compl_syntax(ctx, &transformed_const, 
source.file_id.macro_file()); - let replacement = format!("{label} "); - - let mut item = - CompletionItem::new(SymbolKind::Const, replacement_range, label, ctx.edition); - item.lookup_by(format_smolstr!("const {const_name}")) - .set_documentation(const_.docs(ctx.db)) - .set_relevance(CompletionRelevance { - exact_name_match: true, - ..Default::default() - }); - match ctx.config.snippet_cap { - Some(cap) => item.snippet_edit( - cap, - TextEdit::replace(replacement_range, format!("{replacement}$0;")), - ), - None => item.text_edit(TextEdit::replace(replacement_range, replacement)), - }; - item.add_to(acc, ctx.db); - } + if let Some(const_name) = const_name + && let Some(source) = ctx.sema.source(const_) + { + let assoc_item = ast::AssocItem::Const(source.value); + if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) { + let transformed_const = match transformed_item { + ast::AssocItem::Const(const_) => const_, + _ => unreachable!(), + }; + + let label = + make_const_compl_syntax(ctx, &transformed_const, source.file_id.macro_file()); + let replacement = format!("{label} "); + + let mut item = + CompletionItem::new(SymbolKind::Const, replacement_range, label, ctx.edition); + item.lookup_by(format_smolstr!("const {const_name}")) + .set_documentation(const_.docs(ctx.db)) + .set_relevance(CompletionRelevance { + exact_name_match: true, + ..Default::default() + }); + match ctx.config.snippet_cap { + Some(cap) => item.snippet_edit( + cap, + TextEdit::replace(replacement_range, format!("{replacement}$0;")), + ), + None => item.text_edit(TextEdit::replace(replacement_range, replacement)), + }; + item.add_to(acc, ctx.db); } } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index 013747e4d0cc7..3333300045773 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -26,18 +26,17 @@ pub(crate) fn complete_mod( let mut current_module = ctx.module; // For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're // interested in its parent. 
- if ctx.original_token.kind() == SyntaxKind::IDENT { - if let Some(module) = + if ctx.original_token.kind() == SyntaxKind::IDENT + && let Some(module) = ctx.original_token.parent_ancestors().nth(1).and_then(ast::Module::cast) - { - match ctx.sema.to_def(&module) { - Some(module) if module == current_module => { - if let Some(parent) = current_module.parent(ctx.db) { - current_module = parent; - } + { + match ctx.sema.to_def(&module) { + Some(module) if module == current_module => { + if let Some(parent) = current_module.parent(ctx.db) { + current_module = parent; } - _ => {} } + _ => {} } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs index 62fae1cb23746..815ce5145dbec 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs @@ -64,18 +64,17 @@ pub(crate) fn complete_pattern( if let Some(hir::Adt::Enum(e)) = ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) + && (refutable || single_variant_enum(e)) { - if refutable || single_variant_enum(e) { - super::enum_variants_with_paths( - acc, - ctx, - e, - &pattern_ctx.impl_, - |acc, ctx, variant, path| { - acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path); - }, - ); - } + super::enum_variants_with_paths( + acc, + ctx, + e, + &pattern_ctx.impl_, + |acc, ctx, variant, path| { + acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path); + }, + ); } // FIXME: ideally, we should look at the type we are matching against and diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index d0023852acf9f..0058611a61539 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -65,26 +65,19 @@ pub(crate) fn complete_postfix( let cfg = ctx.config.import_path_config(ctx.is_nightly); - if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { - if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) { - if let Some(drop_fn) = ctx.famous_defs().core_mem_drop() { - if let Some(path) = - ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg) - { - cov_mark::hit!(postfix_drop_completion); - let mut item = postfix_snippet( - "drop", - "fn drop(&mut self)", - &format!( - "{path}($0{receiver_text})", - path = path.display(ctx.db, ctx.edition) - ), - ); - item.set_documentation(drop_fn.docs(ctx.db)); - item.add_to(acc, ctx.db); - } - } - } + if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() + && receiver_ty.impls_trait(ctx.db, drop_trait, &[]) + && let Some(drop_fn) = ctx.famous_defs().core_mem_drop() + && let Some(path) = ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg) + { + cov_mark::hit!(postfix_drop_completion); + let mut item = postfix_snippet( + "drop", + "fn drop(&mut self)", + &format!("{path}($0{receiver_text})", path = path.display(ctx.db, ctx.edition)), + ); + item.set_documentation(drop_fn.docs(ctx.db)); + item.add_to(acc, ctx.db); } postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc, ctx.db); @@ -117,56 +110,50 @@ pub(crate) fn complete_postfix( let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references()); let mut is_in_cond = false; - if let Some(parent) = dot_receiver_including_refs.syntax().parent() { - 
if let Some(second_ancestor) = parent.parent() { - let sec_ancestor_kind = second_ancestor.kind(); - if let Some(expr) = >::cast(second_ancestor) { - is_in_cond = match expr { - Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent), - Either::Right(it) => { - it.condition().is_some_and(|cond| *cond.syntax() == parent) - } - } + if let Some(parent) = dot_receiver_including_refs.syntax().parent() + && let Some(second_ancestor) = parent.parent() + { + let sec_ancestor_kind = second_ancestor.kind(); + if let Some(expr) = >::cast(second_ancestor) { + is_in_cond = match expr { + Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent), + Either::Right(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent), } - match &try_enum { - Some(try_enum) if is_in_cond => match try_enum { - TryEnum::Result => { - postfix_snippet( - "let", - "let Ok(_)", - &format!("let Ok($0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - postfix_snippet( - "letm", - "let Ok(mut _)", - &format!("let Ok(mut $0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - } - TryEnum::Option => { - postfix_snippet( - "let", - "let Some(_)", - &format!("let Some($0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - postfix_snippet( - "letm", - "let Some(mut _)", - &format!("let Some(mut $0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - } - }, - _ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => { - postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")) - .add_to(acc, ctx.db); - postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};")) + } + match &try_enum { + Some(try_enum) if is_in_cond => match try_enum { + TryEnum::Result => { + postfix_snippet("let", "let Ok(_)", &format!("let Ok($0) = {receiver_text}")) .add_to(acc, ctx.db); + postfix_snippet( + "letm", + "let Ok(mut _)", + &format!("let Ok(mut $0) = {receiver_text}"), + ) + .add_to(acc, ctx.db); + } + TryEnum::Option => { + postfix_snippet( + "let", + "let Some(_)", + &format!("let Some($0) = {receiver_text}"), + ) + .add_to(acc, ctx.db); + postfix_snippet( + "letm", + "let Some(mut _)", + &format!("let Some(mut $0) = {receiver_text}"), + ) + .add_to(acc, ctx.db); } - _ => (), + }, + _ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => { + postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")) + .add_to(acc, ctx.db); + postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};")) + .add_to(acc, ctx.db); } + _ => (), } } @@ -258,25 +245,25 @@ pub(crate) fn complete_postfix( ) .add_to(acc, ctx.db); postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db); - } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() { - if receiver_ty.impls_trait(ctx.db, trait_, &[]) { - postfix_snippet( - "for", - "for ele in expr {}", - &format!("for ele in {receiver_text} {{\n $0\n}}"), - ) - .add_to(acc, ctx.db); - } + } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() + && receiver_ty.impls_trait(ctx.db, trait_, &[]) + { + postfix_snippet( + "for", + "for ele in expr {}", + &format!("for ele in {receiver_text} {{\n $0\n}}"), + ) + .add_to(acc, ctx.db); } } let mut block_should_be_wrapped = true; if dot_receiver.syntax().kind() == BLOCK_EXPR { block_should_be_wrapped = false; - if let Some(parent) = dot_receiver.syntax().parent() { - if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) { - block_should_be_wrapped = true; - } + if let Some(parent) = 
dot_receiver.syntax().parent() + && matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) + { + block_should_be_wrapped = true; } }; { @@ -292,10 +279,10 @@ pub(crate) fn complete_postfix( postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db); } - if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() { - if let Some(literal_text) = ast::String::cast(literal.token()) { - add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text); - } + if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() + && let Some(literal_text) = ast::String::cast(literal.token()) + { + add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text); } postfix_snippet( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs index d2ab193ec3dfa..f39b641649326 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs @@ -54,12 +54,10 @@ pub(crate) fn complete_use_path( for (name, def) in module_scope { if let (Some(attrs), Some(defining_crate)) = (def.attrs(ctx.db), def.krate(ctx.db)) + && (!ctx.check_stability(Some(&attrs)) + || ctx.is_doc_hidden(&attrs, defining_crate)) { - if !ctx.check_stability(Some(&attrs)) - || ctx.is_doc_hidden(&attrs, defining_crate) - { - continue; - } + continue; } let is_name_already_imported = already_imported_names.contains(name.as_str()); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs index 38761f77a2c5f..28d906d91ce5a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs @@ -20,11 +20,11 @@ pub(crate) fn complete_vis_path( // Try completing next child module of the path that is still a parent of the current module let next_towards_current = ctx.module.path_to_root(ctx.db).into_iter().take_while(|it| it != module).last(); - if let Some(next) = next_towards_current { - if let Some(name) = next.name(ctx.db) { - cov_mark::hit!(visibility_qualified); - acc.add_module(ctx, path_ctx, next, name, vec![]); - } + if let Some(next) = next_towards_current + && let Some(name) = next.name(ctx.db) + { + cov_mark::hit!(visibility_qualified); + acc.add_module(ctx, path_ctx, next, name, vec![]); } acc.add_super_keyword(ctx, *super_chain_len); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index ea5fb39338b2e..2eabf99fc697e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -287,24 +287,22 @@ fn expand( &spec_attr, fake_ident_token.clone(), ), - ) { - if let Some((fake_mapped_token, _)) = - fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank) - { - return Some(ExpansionResult { - original_file: original_file.value, - speculative_file, - original_offset, - speculative_offset: fake_ident_token.text_range().start(), - fake_ident_token, - derive_ctx: Some(( - actual_expansion, - fake_expansion, - fake_mapped_token.text_range().start(), - orig_attr, - )), - }); - } + ) && let Some((fake_mapped_token, _)) = + fake_mapped_tokens.into_iter().min_by_key(|(_, 
rank)| *rank) + { + return Some(ExpansionResult { + original_file: original_file.value, + speculative_file, + original_offset, + speculative_offset: fake_ident_token.text_range().start(), + fake_ident_token, + derive_ctx: Some(( + actual_expansion, + fake_expansion, + fake_mapped_token.text_range().start(), + orig_attr, + )), + }); } if let Some(spec_adt) = @@ -535,14 +533,13 @@ fn analyze<'db>( NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..), .. } = &nameref_ctx + && is_in_token_of_for_loop(path) { - if is_in_token_of_for_loop(path) { - // for pat $0 - // there is nothing to complete here except `in` keyword - // don't bother populating the context - // Ideally this special casing wouldn't be needed, but the parser recovers - return None; - } + // for pat $0 + // there is nothing to complete here except `in` keyword + // don't bother populating the context + // Ideally this special casing wouldn't be needed, but the parser recovers + return None; } qual_ctx = qualifier_ctx; @@ -951,29 +948,26 @@ fn classify_name_ref<'db>( let inbetween_body_and_decl_check = |node: SyntaxNode| { if let Some(NodeOrToken::Node(n)) = syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev) + && let Some(item) = ast::Item::cast(n) { - if let Some(item) = ast::Item::cast(n) { - let is_inbetween = match &item { - ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(), - ast::Item::Enum(it) => it.variant_list().is_none(), - ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), - ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(), - ast::Item::Impl(it) => it.assoc_item_list().is_none(), - ast::Item::Module(it) => { - it.item_list().is_none() && it.semicolon_token().is_none() - } - ast::Item::Static(it) => it.body().is_none(), - ast::Item::Struct(it) => { - it.field_list().is_none() && it.semicolon_token().is_none() - } - ast::Item::Trait(it) => it.assoc_item_list().is_none(), - ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(), - ast::Item::Union(it) => it.record_field_list().is_none(), - _ => false, - }; - if is_inbetween { - return Some(item); + let is_inbetween = match &item { + ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(), + ast::Item::Enum(it) => it.variant_list().is_none(), + ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), + ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(), + ast::Item::Impl(it) => it.assoc_item_list().is_none(), + ast::Item::Module(it) => it.item_list().is_none() && it.semicolon_token().is_none(), + ast::Item::Static(it) => it.body().is_none(), + ast::Item::Struct(it) => { + it.field_list().is_none() && it.semicolon_token().is_none() } + ast::Item::Trait(it) => it.assoc_item_list().is_none(), + ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(), + ast::Item::Union(it) => it.record_field_list().is_none(), + _ => false, + }; + if is_inbetween { + return Some(item); } } None @@ -1502,10 +1496,10 @@ fn classify_name_ref<'db>( } }; } - } else if let Some(segment) = path.segment() { - if segment.coloncolon_token().is_some() { - path_ctx.qualified = Qualified::Absolute; - } + } else if let Some(segment) = path.segment() + && segment.coloncolon_token().is_some() + { + path_ctx.qualified = Qualified::Absolute; } let mut qualifier_ctx = QualifierCtx::default(); @@ -1530,38 +1524,30 @@ fn classify_name_ref<'db>( if let Some(top) = top_node { if let 
Some(NodeOrToken::Node(error_node)) = syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev) + && error_node.kind() == SyntaxKind::ERROR { - if error_node.kind() == SyntaxKind::ERROR { - for token in - error_node.children_with_tokens().filter_map(NodeOrToken::into_token) - { - match token.kind() { - SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token), - SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token), - SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token), - _ => {} - } + for token in error_node.children_with_tokens().filter_map(NodeOrToken::into_token) { + match token.kind() { + SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token), + SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token), + SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token), + _ => {} } - qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); } + qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); } - if let PathKind::Item { .. } = path_ctx.kind { - if qualifier_ctx.none() { - if let Some(t) = top.first_token() { - if let Some(prev) = t - .prev_token() - .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) - { - if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) { - // This was inferred to be an item position path, but it seems - // to be part of some other broken node which leaked into an item - // list - return None; - } - } - } - } + if let PathKind::Item { .. } = path_ctx.kind + && qualifier_ctx.none() + && let Some(t) = top.first_token() + && let Some(prev) = + t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) + && ![T![;], T!['}'], T!['{']].contains(&prev.kind()) + { + // This was inferred to be an item position path, but it seems + // to be part of some other broken node which leaked into an item + // list + return None; } } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index dcaac3997b275..f27cd07816657 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -636,10 +636,10 @@ impl Builder { } pub(crate) fn set_detail(&mut self, detail: Option>) -> &mut Builder { self.detail = detail.map(Into::into); - if let Some(detail) = &self.detail { - if never!(detail.contains('\n'), "multiline detail:\n{}", detail) { - self.detail = Some(detail.split('\n').next().unwrap().to_owned()); - } + if let Some(detail) = &self.detail + && never!(detail.contains('\n'), "multiline detail:\n{}", detail) + { + self.detail = Some(detail.split('\n').next().unwrap().to_owned()); } self } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 1fdd4cdb1c6bb..a70a1138d2f42 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -208,9 +208,9 @@ pub fn completions( // when the user types a bare `_` (that is it does not belong to an identifier) // the user might just wanted to type a `_` for type inference or pattern discarding // so try to suppress completions in those cases - if trigger_character == Some('_') && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE - { - if let CompletionAnalysis::NameRef(NameRefContext { + if trigger_character == Some('_') + && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE + && let 
CompletionAnalysis::NameRef(NameRefContext { kind: NameRefKind::Path( path_ctx @ PathCompletionCtx { @@ -220,11 +220,9 @@ pub fn completions( ), .. }) = analysis - { - if path_ctx.is_trivial_path() { - return None; - } - } + && path_ctx.is_trivial_path() + { + return None; } { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index c6b8af3c79a2d..3d7a4067c2cd0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -164,19 +164,18 @@ pub(crate) fn render_field( let expected_fn_type = ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure()); - if !expected_fn_type { - if let Some(receiver) = &dot_access.receiver { - if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) { - builder.insert(receiver.syntax().text_range().start(), "(".to_owned()); - builder.insert(ctx.source_range().end(), ")".to_owned()); - - let is_parens_needed = - !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); - - if is_parens_needed { - builder.insert(ctx.source_range().end(), "()".to_owned()); - } - } + if !expected_fn_type + && let Some(receiver) = &dot_access.receiver + && let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) + { + builder.insert(receiver.syntax().text_range().start(), "(".to_owned()); + builder.insert(ctx.source_range().end(), ")".to_owned()); + + let is_parens_needed = + !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); + + if is_parens_needed { + builder.insert(ctx.source_range().end(), "()".to_owned()); } } @@ -184,12 +183,11 @@ pub(crate) fn render_field( } else { item.insert_text(field_with_receiver(receiver.as_deref(), &escaped_name)); } - if let Some(receiver) = &dot_access.receiver { - if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { - if let Some(ref_mode) = compute_ref_match(ctx.completion, ty) { - item.ref_match(ref_mode, original.syntax().text_range().start()); - } - } + if let Some(receiver) = &dot_access.receiver + && let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) + && let Some(ref_mode) = compute_ref_match(ctx.completion, ty) + { + item.ref_match(ref_mode, original.syntax().text_range().start()); } item.doc_aliases(ctx.doc_aliases); item.build(db) @@ -437,26 +435,21 @@ fn render_resolution_path( path_ctx, PathCompletionCtx { kind: PathKind::Type { .. }, has_type_args: false, .. 
} ) && config.callable.is_some(); - if type_path_no_ty_args { - if let Some(cap) = cap { - let has_non_default_type_params = match resolution { - ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db), - ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => { - it.has_non_default_type_params(db) - } - _ => false, - }; - - if has_non_default_type_params { - cov_mark::hit!(inserts_angle_brackets_for_generics); - item.lookup_by(name.clone()) - .label(SmolStr::from_iter([&name, "<…>"])) - .trigger_call_info() - .insert_snippet( - cap, - format!("{}<$0>", local_name.display(db, completion.edition)), - ); + if type_path_no_ty_args && let Some(cap) = cap { + let has_non_default_type_params = match resolution { + ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db), + ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => { + it.has_non_default_type_params(db) } + _ => false, + }; + + if has_non_default_type_params { + cov_mark::hit!(inserts_angle_brackets_for_generics); + item.lookup_by(name.clone()) + .label(SmolStr::from_iter([&name, "<…>"])) + .trigger_call_info() + .insert_snippet(cap, format!("{}<$0>", local_name.display(db, completion.edition))); } } @@ -634,23 +627,24 @@ fn compute_ref_match( if expected_type.could_unify_with(ctx.db, completion_ty) { return None; } - if let Some(expected_without_ref) = &expected_without_ref { - if completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) { - cov_mark::hit!(suggest_ref); - let mutability = if expected_type.is_mutable_reference() { - hir::Mutability::Mut - } else { - hir::Mutability::Shared - }; - return Some(CompletionItemRefMode::Reference(mutability)); - } + if let Some(expected_without_ref) = &expected_without_ref + && completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) + { + cov_mark::hit!(suggest_ref); + let mutability = if expected_type.is_mutable_reference() { + hir::Mutability::Mut + } else { + hir::Mutability::Shared + }; + return Some(CompletionItemRefMode::Reference(mutability)); } - if let Some(completion_without_ref) = completion_without_ref { - if completion_without_ref == *expected_type && completion_without_ref.is_copy(ctx.db) { - cov_mark::hit!(suggest_deref); - return Some(CompletionItemRefMode::Dereference); - } + if let Some(completion_without_ref) = completion_without_ref + && completion_without_ref == *expected_type + && completion_without_ref.is_copy(ctx.db) + { + cov_mark::hit!(suggest_deref); + return Some(CompletionItemRefMode::Dereference); } None @@ -664,10 +658,10 @@ fn path_ref_match( ) { if let Some(original_path) = &path_ctx.original_path { // At least one char was typed by the user already, in that case look for the original path - if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) { - if let Some(ref_mode) = compute_ref_match(completion, ty) { - item.ref_match(ref_mode, original_path.syntax().text_range().start()); - } + if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) + && let Some(ref_mode) = compute_ref_match(completion, ty) + { + item.ref_match(ref_mode, original_path.syntax().text_range().start()); } } else { // completion requested on an empty identifier, there is no path here yet. 
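(Aside, not part of the patch itself: the hunks before and after this point all apply the same mechanical refactor, collapsing nested `if let` blocks into a single let-chain condition; this assumes a toolchain where let chains are stable, i.e. the Rust 2024 edition. A minimal sketch of the before/after shape, using hypothetical names not taken from this diff:

    // Hypothetical example of the nested-`if let` -> let-chain rewrite.
    fn first_alphabetic(input: Option<&str>) -> Option<char> {
        // Before the rewrite, each binding needed its own nested block:
        // if let Some(s) = input {
        //     if let Some(c) = s.chars().next() {
        //         if c.is_alphabetic() {
        //             return Some(c);
        //         }
        //     }
        // }
        // After the rewrite, the bindings and the boolean test form one flat chain:
        if let Some(s) = input
            && let Some(c) = s.chars().next()
            && c.is_alphabetic()
        {
            return Some(c);
        }
        None
    }

    fn main() {
        assert_eq!(first_alphabetic(Some("rust")), Some('r'));
        assert_eq!(first_alphabetic(Some("1st")), None);
        assert_eq!(first_alphabetic(None), None);
    }

The flattened form keeps early `return`/`continue` bodies at one indentation level, which appears to be the readability win these hunks aim for.)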
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs index f11b3023679ac..707a8aed4fb9e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs @@ -25,10 +25,10 @@ fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option .detail(detail) .set_relevance(ctx.completion_relevance()); - if let Some(actm) = const_.as_assoc_item(db) { - if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); - } + if let Some(actm) = const_.as_assoc_item(db) + && let Some(trt) = actm.container_or_implemented_trait(db) + { + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 7669aec8f535c..c466019f991f7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -132,10 +132,10 @@ fn render( super::path_ref_match(completion, path_ctx, &ret_type, &mut item); } FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => { - if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) { - if let Some(ref_mode) = compute_ref_match(completion, &ret_type) { - item.ref_match(ref_mode, original_expr.syntax().text_range().start()); - } + if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) + && let Some(ref_mode) = compute_ref_match(completion, &ret_type) + { + item.ref_match(ref_mode, original_expr.syntax().text_range().start()); } } _ => (), @@ -169,12 +169,10 @@ fn render( item.add_import(import_to_add); } None => { - if let Some(actm) = assoc_item { - if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name( - trt.name(db).display_no_db(ctx.completion.edition).to_smolstr(), - ); - } + if let Some(actm) = assoc_item + && let Some(trt) = actm.container_or_implemented_trait(db) + { + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } } } @@ -378,15 +376,13 @@ fn params<'db>( ctx.config.callable.as_ref()?; // Don't add parentheses if the expected type is a function reference with the same signature. 
- if let Some(expected) = ctx.expected_type.as_ref().filter(|e| e.is_fn()) { - if let Some(expected) = expected.as_callable(ctx.db) { - if let Some(completed) = func.ty(ctx.db).as_callable(ctx.db) { - if expected.sig() == completed.sig() { - cov_mark::hit!(no_call_parens_if_fn_ptr_needed); - return None; - } - } - } + if let Some(expected) = ctx.expected_type.as_ref().filter(|e| e.is_fn()) + && let Some(expected) = expected.as_callable(ctx.db) + && let Some(completed) = func.ty(ctx.db).as_callable(ctx.db) + && expected.sig() == completed.sig() + { + cov_mark::hit!(no_call_parens_if_fn_ptr_needed); + return None; } let self_param = if has_dot_receiver || matches!(func_kind, FuncKind::Method(_, Some(_))) { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs index d57feee4fa65e..3fc0f369e5ada 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs @@ -51,10 +51,10 @@ fn render( .detail(detail) .set_relevance(ctx.completion_relevance()); - if let Some(actm) = type_alias.as_assoc_item(db) { - if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); - } + if let Some(actm) = type_alias.as_assoc_item(db) + && let Some(trt) = actm.container_or_implemented_trait(db) + { + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index a4a140ec57aa0..2a4fcf6a2e5f7 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -610,18 +610,16 @@ impl<'db> NameClass<'db> { let local = sema.to_def(&ident_pat)?; let pat_parent = ident_pat.syntax().parent(); - if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) { - if record_pat_field.name_ref().is_none() { - if let Some((field, _, adt_subst)) = - sema.resolve_record_pat_field_with_subst(&record_pat_field) - { - return Some(NameClass::PatFieldShorthand { - local_def: local, - field_ref: field, - adt_subst, - }); - } - } + if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) + && record_pat_field.name_ref().is_none() + && let Some((field, _, adt_subst)) = + sema.resolve_record_pat_field_with_subst(&record_pat_field) + { + return Some(NameClass::PatFieldShorthand { + local_def: local, + field_ref: field, + adt_subst, + }); } Some(NameClass::Definition(Definition::Local(local))) } @@ -755,30 +753,27 @@ impl<'db> NameRefClass<'db> { let parent = name_ref.syntax().parent()?; - if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) { - if let Some((field, local, _, adt_subst)) = + if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) + && let Some((field, local, _, adt_subst)) = sema.resolve_record_field_with_substitution(&record_field) - { - let res = match local { - None => NameRefClass::Definition(Definition::Field(field), Some(adt_subst)), - Some(local) => NameRefClass::FieldShorthand { - field_ref: field, - local_ref: local, - adt_subst, - }, - }; - return Some(res); - } + { + let res = match local { + None => NameRefClass::Definition(Definition::Field(field), Some(adt_subst)), + Some(local) => { + NameRefClass::FieldShorthand { field_ref: 
field, local_ref: local, adt_subst } + } + }; + return Some(res); } if let Some(path) = ast::PathSegment::cast(parent.clone()).map(|it| it.parent_path()) { - if path.parent_path().is_none() { - if let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - // Only use this to resolve to macro calls for last segments as qualifiers resolve - // to modules below. - if let Some(macro_def) = sema.resolve_macro_call(&macro_call) { - return Some(NameRefClass::Definition(Definition::Macro(macro_def), None)); - } + if path.parent_path().is_none() + && let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) + { + // Only use this to resolve to macro calls for last segments as qualifiers resolve + // to modules below. + if let Some(macro_def) = sema.resolve_macro_call(&macro_call) { + return Some(NameRefClass::Definition(Definition::Macro(macro_def), None)); } } return sema @@ -820,8 +815,8 @@ impl<'db> NameRefClass<'db> { // ^^^^^ let containing_path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; let resolved = sema.resolve_path(&containing_path)?; - if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved { - if let Some(ty) = tr + if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved + && let Some(ty) = tr .items_with_supertraits(sema.db) .iter() .filter_map(|&assoc| match assoc { @@ -833,7 +828,6 @@ impl<'db> NameRefClass<'db> { // No substitution, this can only occur in type position. return Some(NameRefClass::Definition(Definition::TypeAlias(ty), None)); } - } None }, ast::UseBoundGenericArgs(_) => { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index 340429037e67a..1e54058dd16ca 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -70,11 +70,11 @@ pub fn visit_file_defs( }; let mut defs: VecDeque<_> = module.declarations(db).into(); while let Some(def) = defs.pop_front() { - if let ModuleDef::Module(submodule) = def { - if submodule.is_inline(db) { - defs.extend(submodule.declarations(db)); - submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into())); - } + if let ModuleDef::Module(submodule) = def + && submodule.is_inline(db) + { + defs.extend(submodule.declarations(db)); + submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into())); } cb(def.into()); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs index 813f38380f69d..08cd8f28608ca 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs @@ -97,12 +97,11 @@ impl ImportScope { .map(ImportScopeKind::Module) .map(|kind| ImportScope { kind, required_cfgs }); } else if let Some(has_attrs) = ast::AnyHasAttrs::cast(syntax) { - if block.is_none() { - if let Some(b) = ast::BlockExpr::cast(has_attrs.syntax().clone()) { - if let Some(b) = sema.original_ast_node(b) { - block = b.stmt_list(); - } - } + if block.is_none() + && let Some(b) = ast::BlockExpr::cast(has_attrs.syntax().clone()) + && let Some(b) = sema.original_ast_node(b) + { + block = b.stmt_list(); } if has_attrs .attrs() @@ -349,26 +348,24 @@ fn guess_granularity_from_scope(scope: &ImportScope) -> ImportGranularityGuess { seen_one_style_groups.push((curr_vis.clone(), curr_attrs.clone())); } else if eq_visibility(prev_vis, curr_vis.clone()) && eq_attrs(prev_attrs, curr_attrs.clone()) 
+ && let Some((prev_path, curr_path)) = prev.path().zip(curr.path()) + && let Some((prev_prefix, _)) = common_prefix(&prev_path, &curr_path) { - if let Some((prev_path, curr_path)) = prev.path().zip(curr.path()) { - if let Some((prev_prefix, _)) = common_prefix(&prev_path, &curr_path) { - if prev.use_tree_list().is_none() && curr.use_tree_list().is_none() { - let prefix_c = prev_prefix.qualifiers().count(); - let curr_c = curr_path.qualifiers().count() - prefix_c; - let prev_c = prev_path.qualifiers().count() - prefix_c; - if curr_c == 1 && prev_c == 1 { - // Same prefix, only differing in the last segment and no use tree lists so this has to be of item style. - break ImportGranularityGuess::Item; - } else { - // Same prefix and no use tree list but differs in more than one segment at the end. This might be module style still. - res = ImportGranularityGuess::ModuleOrItem; - } - } else { - // Same prefix with item tree lists, has to be module style as it - // can't be crate style since the trees wouldn't share a prefix then. - break ImportGranularityGuess::Module; - } + if prev.use_tree_list().is_none() && curr.use_tree_list().is_none() { + let prefix_c = prev_prefix.qualifiers().count(); + let curr_c = curr_path.qualifiers().count() - prefix_c; + let prev_c = prev_path.qualifiers().count() - prefix_c; + if curr_c == 1 && prev_c == 1 { + // Same prefix, only differing in the last segment and no use tree lists so this has to be of item style. + break ImportGranularityGuess::Item; + } else { + // Same prefix and no use tree list but differs in more than one segment at the end. This might be module style still. + res = ImportGranularityGuess::ModuleOrItem; } + } else { + // Same prefix with item tree lists, has to be module style as it + // can't be crate style since the trees wouldn't share a prefix then. 
+ break ImportGranularityGuess::Module; } } prev = curr; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index c94be7e164e27..49f7f63a04a42 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -244,7 +244,7 @@ pub trait LineIndexDatabase: base_db::RootQueryDb { fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc { let text = db.file_text(file_id).text(db); - Arc::new(LineIndex::new(&text)) + Arc::new(LineIndex::new(text)) } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index b7432d89c7b77..5d88afec50951 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -193,13 +193,12 @@ impl<'a> PathTransform<'a> { } } (Either::Right(k), None) => { - if let Some(default) = k.default(db) { - if let Some(default) = + if let Some(default) = k.default(db) + && let Some(default) = &default.display_source_code(db, source_module.into(), false).ok() - { - type_substs.insert(k, make::ty(default).clone_for_update()); - defaulted_params.push(Either::Left(k)); - } + { + type_substs.insert(k, make::ty(default).clone_for_update()); + defaulted_params.push(Either::Left(k)); } } (Either::Left(k), Some(TypeOrConst::Either(v))) => { @@ -221,11 +220,10 @@ impl<'a> PathTransform<'a> { (Either::Left(k), None) => { if let Some(default) = k.default(db, target_module.krate().to_display_target(db)) + && let Some(default) = default.expr() { - if let Some(default) = default.expr() { - const_substs.insert(k, default.syntax().clone_for_update()); - defaulted_params.push(Either::Right(k)); - } + const_substs.insert(k, default.syntax().clone_for_update()); + defaulted_params.push(Either::Right(k)); } } _ => (), // ignore mismatching params @@ -427,14 +425,14 @@ impl Ctx<'_> { } } hir::PathResolution::Def(def) if def.as_assoc_item(self.source_scope.db).is_none() => { - if let hir::ModuleDef::Trait(_) = def { - if matches!(path.segment()?.kind()?, ast::PathSegmentKind::Type { .. }) { - // `speculative_resolve` resolves segments like `` into `Trait`, but just the trait name should - // not be used as the replacement of the original - // segment. - return None; - } + if let hir::ModuleDef::Trait(_) = def + && matches!(path.segment()?.kind()?, ast::PathSegmentKind::Type { .. }) + { + // `speculative_resolve` resolves segments like `` into `Trait`, but just the trait name should + // not be used as the replacement of the original + // segment. 
+ return None; } let cfg = ImportPathConfig { @@ -446,19 +444,17 @@ impl Ctx<'_> { let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?; let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update(); let mut res_editor = SyntaxEditor::new(res.syntax().clone_subtree()); - if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { - if let Some(segment) = res.segment() { - if let Some(old) = segment.generic_arg_list() { - res_editor.replace( - old.syntax(), - args.clone_subtree().syntax().clone_for_update(), - ) - } else { - res_editor.insert( - syntax_editor::Position::last_child_of(segment.syntax()), - args.clone_subtree().syntax().clone_for_update(), - ); - } + if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) + && let Some(segment) = res.segment() + { + if let Some(old) = segment.generic_arg_list() { + res_editor + .replace(old.syntax(), args.clone_subtree().syntax().clone_for_update()) + } else { + res_editor.insert( + syntax_editor::Position::last_child_of(segment.syntax()), + args.clone_subtree().syntax().clone_for_update(), + ); } } let res = res_editor.finish().new_root().clone(); @@ -485,27 +481,27 @@ impl Ctx<'_> { .ok()?; let ast_ty = make::ty(ty_str).clone_for_update(); - if let Some(adt) = ty.as_adt() { - if let ast::Type::PathType(path_ty) = &ast_ty { - let cfg = ImportPathConfig { - prefer_no_std: false, - prefer_prelude: true, - prefer_absolute: false, - allow_unstable: true, - }; - let found_path = self.target_module.find_path( - self.source_scope.db, - ModuleDef::from(adt), - cfg, - )?; - - if let Some(qual) = - mod_path_to_ast(&found_path, self.target_edition).qualifier() - { - let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); - editor.replace(path.syntax(), res.syntax()); - return Some(()); - } + if let Some(adt) = ty.as_adt() + && let ast::Type::PathType(path_ty) = &ast_ty + { + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + allow_unstable: true, + }; + let found_path = self.target_module.find_path( + self.source_scope.db, + ModuleDef::from(adt), + cfg, + )?; + + if let Some(qual) = + mod_path_to_ast(&found_path, self.target_edition).qualifier() + { + let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); + editor.replace(path.syntax(), res.syntax()); + return Some(()); } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index 4e737e27f0505..424b27a398b20 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -442,17 +442,17 @@ fn source_edit_from_name( name: &ast::Name, new_name: &dyn Display, ) -> bool { - if ast::RecordPatField::for_field_name(name).is_some() { - if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) { - cov_mark::hit!(rename_record_pat_field_name_split); - // Foo { ref mut field } -> Foo { new_name: ref mut field } - // ^ insert `new_name: ` - - // FIXME: instead of splitting the shorthand, recursively trigger a rename of the - // other name https://github.com/rust-lang/rust-analyzer/issues/6547 - edit.insert(ident_pat.syntax().text_range().start(), format!("{new_name}: ")); - return true; - } + if ast::RecordPatField::for_field_name(name).is_some() + && let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) + { + cov_mark::hit!(rename_record_pat_field_name_split); + // Foo { ref mut field } -> 
Foo { new_name: ref mut field } + // ^ insert `new_name: ` + + // FIXME: instead of splitting the shorthand, recursively trigger a rename of the + // other name https://github.com/rust-lang/rust-analyzer/issues/6547 + edit.insert(ident_pat.syntax().text_range().start(), format!("{new_name}: ")); + return true; } false diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 9cf0bcf919011..abd4dc8300b39 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -295,10 +295,10 @@ impl Definition { } // def is crate root - if let &Definition::Module(module) = self { - if module.is_crate_root() { - return SearchScope::reverse_dependencies(db, module.krate()); - } + if let &Definition::Module(module) = self + && module.is_crate_root() + { + return SearchScope::reverse_dependencies(db, module.krate()); } let module = match self.module(db) { @@ -487,9 +487,9 @@ impl<'a> FindUsages<'a> { scope.entries.iter().map(|(&file_id, &search_range)| { let text = db.file_text(file_id.file_id(db)).text(db); let search_range = - search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&**text))); - (text, file_id, search_range) + (text.clone(), file_id, search_range) }) } @@ -683,51 +683,47 @@ impl<'a> FindUsages<'a> { } } else if let Some(alias) = usage.ancestors().find_map(ast::TypeAlias::cast) + && let Some(name) = alias.name() + && seen + .insert(InFileWrapper::new(file_id, name.syntax().text_range())) { - if let Some(name) = alias.name() { - if seen.insert(InFileWrapper::new( - file_id, - name.syntax().text_range(), - )) { - if let Some(def) = is_alias(&alias) { - cov_mark::hit!(container_type_alias); - insert_type_alias( - sema.db, - &mut to_process, - name.text().as_str(), - def.into(), - ); - } else { - cov_mark::hit!(same_name_different_def_type_alias); - } - } + if let Some(def) = is_alias(&alias) { + cov_mark::hit!(container_type_alias); + insert_type_alias( + sema.db, + &mut to_process, + name.text().as_str(), + def.into(), + ); + } else { + cov_mark::hit!(same_name_different_def_type_alias); } } // We need to account for `Self`. It can only refer to our type inside an impl. let impl_ = 'impl_: { for ancestor in usage.ancestors() { - if let Some(parent) = ancestor.parent() { - if let Some(parent) = ast::Impl::cast(parent) { - // Only if the GENERIC_PARAM_LIST is directly under impl, otherwise it may be in the self ty. - if matches!( - ancestor.kind(), - SyntaxKind::ASSOC_ITEM_LIST - | SyntaxKind::WHERE_CLAUSE - | SyntaxKind::GENERIC_PARAM_LIST - ) { - break; - } - if parent - .trait_() - .is_some_and(|trait_| *trait_.syntax() == ancestor) - { - break; - } - - // Otherwise, found an impl where its self ty may be our type. - break 'impl_ Some(parent); + if let Some(parent) = ancestor.parent() + && let Some(parent) = ast::Impl::cast(parent) + { + // Only if the GENERIC_PARAM_LIST is directly under impl, otherwise it may be in the self ty. + if matches!( + ancestor.kind(), + SyntaxKind::ASSOC_ITEM_LIST + | SyntaxKind::WHERE_CLAUSE + | SyntaxKind::GENERIC_PARAM_LIST + ) { + break; + } + if parent + .trait_() + .is_some_and(|trait_| *trait_.syntax() == ancestor) + { + break; } + + // Otherwise, found an impl where its self ty may be our type. 
+ break 'impl_ Some(parent); } } None @@ -858,14 +854,7 @@ impl<'a> FindUsages<'a> { &finder, name, is_possibly_self.into_iter().map(|position| { - ( - self.sema - .db - .file_text(position.file_id.file_id(self.sema.db)) - .text(self.sema.db), - position.file_id, - position.range, - ) + (position.file_text(self.sema.db).clone(), position.file_id, position.range) }), |path, name_position| { let has_self = path @@ -1071,12 +1060,12 @@ impl<'a> FindUsages<'a> { let file_text = sema.db.file_text(file_id.file_id(self.sema.db)); let text = file_text.text(sema.db); let search_range = - search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&**text))); let tree = LazyCell::new(|| sema.parse(file_id).syntax().clone()); let finder = &Finder::new("self"); - for offset in Self::match_indices(&text, finder, search_range) { + for offset in Self::match_indices(text, finder, search_range) { for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset) .filter_map(ast::NameRef::cast) { @@ -1356,11 +1345,10 @@ impl ReferenceCategory { if matches!(expr.op_kind()?, ast::BinaryOp::Assignment { .. }) { // If the variable or field ends on the LHS's end then it's a Write // (covers fields and locals). FIXME: This is not terribly accurate. - if let Some(lhs) = expr.lhs() { - if lhs.syntax().text_range().end() == r.syntax().text_range().end() { + if let Some(lhs) = expr.lhs() + && lhs.syntax().text_range().end() == r.syntax().text_range().end() { return Some(ReferenceCategory::WRITE) } - } } Some(ReferenceCategory::READ) }, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index c15cade84a502..9c4e6f5cbf82f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -252,10 +252,10 @@ impl SymbolIndex { let mut last_batch_start = 0; for idx in 0..symbols.len() { - if let Some(next_symbol) = symbols.get(idx + 1) { - if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal { - continue; - } + if let Some(next_symbol) = symbols.get(idx + 1) + && cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal + { + continue; } let start = last_batch_start; @@ -371,10 +371,10 @@ impl Query { if self.exclude_imports && symbol.is_import { continue; } - if self.mode.check(&self.query, self.case_sensitive, symbol_name) { - if let Some(b) = cb(symbol).break_value() { - return Some(b); - } + if self.mode.check(&self.query, self.case_sensitive, symbol_name) + && let Some(b) = cb(symbol).break_value() + { + return Some(b); } } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs index 7e8c921d9ed39..1d4d8decf5413 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs @@ -230,11 +230,11 @@ pub fn lex_format_specifiers( skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback); } continue; - } else if let '}' = first_char { - if let Some((_, '}')) = chars.peek() { - // Escaped format specifier, `}}` - read_escaped_format_specifier(&mut chars, &mut callback); - } + } else if let '}' = first_char + && let Some((_, '}')) = chars.peek() + { + // Escaped format specifier, `}}` + read_escaped_format_specifier(&mut chars, &mut callback); } } diff --git 
a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index bdff64dd0812c..cefd8fd49676e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -79,14 +79,13 @@ pub fn preorder_expr_with_ctx_checker( continue; } }; - if let Some(let_stmt) = node.parent().and_then(ast::LetStmt::cast) { - if let_stmt.initializer().map(|it| it.syntax() != &node).unwrap_or(true) - && let_stmt.let_else().map(|it| it.syntax() != &node).unwrap_or(true) - { - // skipping potential const pat expressions in let statements - preorder.skip_subtree(); - continue; - } + if let Some(let_stmt) = node.parent().and_then(ast::LetStmt::cast) + && let_stmt.initializer().map(|it| it.syntax() != &node).unwrap_or(true) + && let_stmt.let_else().map(|it| it.syntax() != &node).unwrap_or(true) + { + // skipping potential const pat expressions in let statements + preorder.skip_subtree(); + continue; } match ast::Stmt::cast(node.clone()) { @@ -306,10 +305,10 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { Some(ast::BlockModifier::AsyncGen(_)) => (), None => (), } - if let Some(stmt_list) = b.stmt_list() { - if let Some(e) = stmt_list.tail_expr() { - for_each_tail_expr(&e, cb); - } + if let Some(stmt_list) = b.stmt_list() + && let Some(e) = stmt_list.tail_expr() + { + for_each_tail_expr(&e, cb); } } ast::Expr::IfExpr(if_) => { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs index f63cd92694b35..a91d436afcfbb 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs @@ -16,17 +16,17 @@ pub fn use_trivial_constructor( ) -> Option { match ty.as_adt() { Some(hir::Adt::Enum(x)) => { - if let &[variant] = &*x.variants(db) { - if variant.kind(db) == hir::StructKind::Unit { - let path = make::path_qualified( - path, - make::path_segment(make::name_ref( - &variant.name(db).display_no_db(edition).to_smolstr(), - )), - ); + if let &[variant] = &*x.variants(db) + && variant.kind(db) == hir::StructKind::Unit + { + let path = make::path_qualified( + path, + make::path_segment(make::name_ref( + &variant.name(db).display_no_db(edition).to_smolstr(), + )), + ); - return Some(make::expr_path(path)); - } + return Some(make::expr_path(path)); } } Some(hir::Adt::Struct(x)) if x.kind(db) == StructKind::Unit => { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index bf7dddacd8c59..742d614bc5673 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -148,37 +148,27 @@ pub(crate) fn json_in_items( allow_unstable: true, }; - if !scope_has("Serialize") { - if let Some(PathResolution::Def(it)) = serialize_resolved { - if let Some(it) = current_module.find_use_path( - sema.db, - it, - config.insert_use.prefix_kind, - cfg, - ) { - insert_use( - &scope, - mod_path_to_ast(&it, edition), - &config.insert_use, - ); - } - } + if !scope_has("Serialize") + && let Some(PathResolution::Def(it)) = serialize_resolved + && let Some(it) = current_module.find_use_path( + sema.db, + it, + 
config.insert_use.prefix_kind, + cfg, + ) + { + insert_use(&scope, mod_path_to_ast(&it, edition), &config.insert_use); } - if !scope_has("Deserialize") { - if let Some(PathResolution::Def(it)) = deserialize_resolved { - if let Some(it) = current_module.find_use_path( - sema.db, - it, - config.insert_use.prefix_kind, - cfg, - ) { - insert_use( - &scope, - mod_path_to_ast(&it, edition), - &config.insert_use, - ); - } - } + if !scope_has("Deserialize") + && let Some(PathResolution::Def(it)) = deserialize_resolved + && let Some(it) = current_module.find_use_path( + sema.db, + it, + config.insert_use.prefix_kind, + cfg, + ) + { + insert_use(&scope, mod_path_to_ast(&it, edition), &config.insert_use); } let mut sc = scb.finish(); sc.insert_source_edit(vfs_file_id, edit.finish()); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 7da799e0d490b..893bfca6a1298 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -227,12 +227,11 @@ fn get_default_constructor( // Look for a ::new() associated function let has_new_func = ty .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| { - if let AssocItem::Function(func) = assoc_item { - if func.name(ctx.sema.db) == sym::new - && func.assoc_fn_params(ctx.sema.db).is_empty() - { - return Some(()); - } + if let AssocItem::Function(func) = assoc_item + && func.name(ctx.sema.db) == sym::new + && func.assoc_fn_params(ctx.sema.db).is_empty() + { + return Some(()); } None diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 8831efa311720..6e30bf92dbaa1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -12,14 +12,14 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option let root = ctx.sema.db.parse_or_expand(d.span.file_id); let node = d.span.value.to_node(&root); let mut span = d.span; - if let Some(parent) = node.parent() { - if ast::BinExpr::can_cast(parent.kind()) { - // In case of an assignment, the diagnostic is provided on the variable name. - // We want to expand it to include the whole assignment, but only when this - // is an ordinary assignment, not a destructuring assignment. So, the direct - // parent is an assignment expression. - span = d.span.with_value(SyntaxNodePtr::new(&parent)); - } + if let Some(parent) = node.parent() + && ast::BinExpr::can_cast(parent.kind()) + { + // In case of an assignment, the diagnostic is provided on the variable name. + // We want to expand it to include the whole assignment, but only when this + // is an ordinary assignment, not a destructuring assignment. So, the direct + // parent is an assignment expression. 
+ span = d.span.with_value(SyntaxNodePtr::new(&parent)); }; let fixes = (|| { @@ -73,10 +73,10 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Op let ast = source.syntax(); let Some(mut_token) = token(ast, T![mut]) else { continue }; edit_builder.delete(mut_token.text_range()); - if let Some(token) = mut_token.next_token() { - if token.kind() == SyntaxKind::WHITESPACE { - edit_builder.delete(token.text_range()); - } + if let Some(token) = mut_token.next_token() + && token.kind() == SyntaxKind::WHITESPACE + { + edit_builder.delete(token.text_range()); } } let edit = edit_builder.finish(); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index d96c658d7b048..3a6e480f55ed4 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -231,13 +231,13 @@ fn make_fixes( // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's // probably `#[cfg]`d out). for item in items.clone() { - if let ast::Item::Module(m) = item { - if let Some(name) = m.name() { - if m.item_list().is_none() && name.to_string() == new_mod_name { - cov_mark::hit!(unlinked_file_skip_fix_when_mod_already_exists); - return None; - } - } + if let ast::Item::Module(m) = item + && let Some(name) = m.name() + && m.item_list().is_none() + && name.to_string() == new_mod_name + { + cov_mark::hit!(unlinked_file_skip_fix_when_mod_already_exists); + return None; } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 72bd66d1c8bb7..a1db92641f5ee 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -568,10 +568,10 @@ fn handle_diag_from_macros( diag.fixes = None; // All Clippy lints report in macros, see https://github.com/rust-lang/rust-clippy/blob/903293b199364/declare_clippy_lint/src/lib.rs#L172. 
- if let DiagnosticCode::RustcLint(lint) = diag.code { - if !LINTS_TO_REPORT_IN_EXTERNAL_MACROS.contains(lint) { - return false; - } + if let DiagnosticCode::RustcLint(lint) = diag.code + && !LINTS_TO_REPORT_IN_EXTERNAL_MACROS.contains(lint) + { + return false; }; } true @@ -760,35 +760,35 @@ fn cfg_attr_lint_attrs( } while let Some(value) = iter.next() { - if let Some(token) = value.as_token() { - if token.kind() == SyntaxKind::IDENT { - let severity = match token.text() { - "allow" | "expect" => Some(Severity::Allow), - "warn" => Some(Severity::Warning), - "forbid" | "deny" => Some(Severity::Error), - "cfg_attr" => { - if let Some(NodeOrToken::Node(value)) = iter.next() { - cfg_attr_lint_attrs(sema, &value, lint_attrs); - } - None - } - _ => None, - }; - if let Some(severity) = severity { - let lints = iter.next(); - if let Some(NodeOrToken::Node(lints)) = lints { - lint_attrs.push((severity, lints)); + if let Some(token) = value.as_token() + && token.kind() == SyntaxKind::IDENT + { + let severity = match token.text() { + "allow" | "expect" => Some(Severity::Allow), + "warn" => Some(Severity::Warning), + "forbid" | "deny" => Some(Severity::Error), + "cfg_attr" => { + if let Some(NodeOrToken::Node(value)) = iter.next() { + cfg_attr_lint_attrs(sema, &value, lint_attrs); } + None + } + _ => None, + }; + if let Some(severity) = severity { + let lints = iter.next(); + if let Some(NodeOrToken::Node(lints)) = lints { + lint_attrs.push((severity, lints)); } } } } - if prev_len != lint_attrs.len() { - if let Some(false) | None = sema.check_cfg_attr(value) { - // Discard the attributes when the condition is false. - lint_attrs.truncate(prev_len); - } + if prev_len != lint_attrs.len() + && let Some(false) | None = sema.check_cfg_attr(value) + { + // Discard the attributes when the condition is false. 
+ lint_attrs.truncate(prev_len); } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index 4e4bd47e1c2f2..181993154e59f 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -229,7 +229,7 @@ pub(crate) fn check_diagnostics_with_config( let line_index = db.line_index(file_id); let mut actual = annotations.remove(&file_id).unwrap_or_default(); - let mut expected = extract_annotations(&db.file_text(file_id).text(&db)); + let mut expected = extract_annotations(db.file_text(file_id).text(&db)); expected.sort_by_key(|(range, s)| (range.start(), s.clone())); actual.sort_by_key(|(range, s)| (range.start(), s.clone())); // FIXME: We should panic on duplicates instead, but includes currently cause us to report diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index e4b20f3f1aad6..43ad12c1f699a 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -186,7 +186,7 @@ impl<'db> MatchFinder<'db> { replacing::matches_to_edit( self.sema.db, &matches, - &self.sema.db.file_text(file_id).text(self.sema.db), + self.sema.db.file_text(file_id).text(self.sema.db), &self.rules, ), ) @@ -228,7 +228,7 @@ impl<'db> MatchFinder<'db> { let file = self.sema.parse(file_id); let mut res = Vec::new(); let file_text = self.sema.db.file_text(file_id.file_id(self.sema.db)).text(self.sema.db); - let mut remaining_text = &*file_text; + let mut remaining_text = &**file_text; let mut base = 0; let len = snippet.len() as u32; while let Some(offset) = remaining_text.find(snippet) { @@ -283,17 +283,16 @@ impl<'db> MatchFinder<'db> { node: node.clone(), }); } - } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) { - if let Some(expanded) = self.sema.expand_macro_call(&macro_call) { - if let Some(tt) = macro_call.token_tree() { - self.output_debug_for_nodes_at_range( - &expanded.value, - range, - &Some(self.sema.original_range(tt.syntax())), - out, - ); - } - } + } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) + && let Some(expanded) = self.sema.expand_macro_call(&macro_call) + && let Some(tt) = macro_call.token_tree() + { + self.output_debug_for_nodes_at_range( + &expanded.value, + range, + &Some(self.sema.original_range(tt.syntax())), + out, + ); } self.output_debug_for_nodes_at_range(&node, range, restrict_range, out); } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs index b350315ba5489..f21132c297ee8 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs @@ -156,12 +156,11 @@ impl<'db, 'sema> Matcher<'db, 'sema> { /// processing a macro expansion and we want to fail the match if we're working with a node that /// didn't originate from the token tree of the macro call. 
fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> { - if let Some(restrict_range) = &self.restrict_range { - if restrict_range.file_id != range.file_id - || !restrict_range.range.contains_range(range.range) - { - fail_match!("Node originated from a macro"); - } + if let Some(restrict_range) = &self.restrict_range + && (restrict_range.file_id != range.file_id + || !restrict_range.range.contains_range(range.range)) + { + fail_match!("Node originated from a macro"); } Ok(()) } @@ -404,30 +403,27 @@ impl<'db, 'sema> Matcher<'db, 'sema> { // Build a map keyed by field name. let mut fields_by_name: FxHashMap = FxHashMap::default(); for child in code.children() { - if let Some(record) = ast::RecordExprField::cast(child.clone()) { - if let Some(name) = record.field_name() { - fields_by_name.insert(name.text().into(), child.clone()); - } + if let Some(record) = ast::RecordExprField::cast(child.clone()) + && let Some(name) = record.field_name() + { + fields_by_name.insert(name.text().into(), child.clone()); } } for p in pattern.children_with_tokens() { - if let SyntaxElement::Node(p) = p { - if let Some(name_element) = p.first_child_or_token() { - if self.get_placeholder(&name_element).is_some() { - // If the pattern is using placeholders for field names then order - // independence doesn't make sense. Fall back to regular ordered - // matching. - return self.attempt_match_node_children(phase, pattern, code); - } - if let Some(ident) = only_ident(name_element) { - let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { - match_error!( - "Placeholder has record field '{}', but code doesn't", - ident - ) - })?; - self.attempt_match_node(phase, &p, &code_record)?; - } + if let SyntaxElement::Node(p) = p + && let Some(name_element) = p.first_child_or_token() + { + if self.get_placeholder(&name_element).is_some() { + // If the pattern is using placeholders for field names then order + // independence doesn't make sense. Fall back to regular ordered + // matching. + return self.attempt_match_node_children(phase, pattern, code); + } + if let Some(ident) = only_ident(name_element) { + let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { + match_error!("Placeholder has record field '{}', but code doesn't", ident) + })?; + self.attempt_match_node(phase, &p, &code_record)?; } } } @@ -476,14 +472,13 @@ impl<'db, 'sema> Matcher<'db, 'sema> { } } SyntaxElement::Node(n) => { - if let Some(first_token) = n.first_token() { - if Some(first_token.text()) == next_pattern_token.as_deref() { - if let Some(SyntaxElement::Node(p)) = pattern.next() { - // We have a subtree that starts with the next token in our pattern. - self.attempt_match_token_tree(phase, &p, n)?; - break; - } - } + if let Some(first_token) = n.first_token() + && Some(first_token.text()) == next_pattern_token.as_deref() + && let Some(SyntaxElement::Node(p)) = pattern.next() + { + // We have a subtree that starts with the next token in our pattern. 
+ self.attempt_match_token_tree(phase, &p, n)?; + break; } } }; @@ -562,23 +557,22 @@ impl<'db, 'sema> Matcher<'db, 'sema> { let deref_count = self.check_expr_type(pattern_type, expr)?; let pattern_receiver = pattern_args.next(); self.attempt_match_opt(phase, pattern_receiver.clone(), code.receiver())?; - if let Phase::Second(match_out) = phase { - if let Some(placeholder_value) = pattern_receiver + if let Phase::Second(match_out) = phase + && let Some(placeholder_value) = pattern_receiver .and_then(|n| self.get_placeholder_for_node(n.syntax())) .and_then(|placeholder| { match_out.placeholder_values.get_mut(&placeholder.ident) }) - { - placeholder_value.autoderef_count = deref_count; - placeholder_value.autoref_kind = self - .sema - .resolve_method_call_as_callable(code) - .and_then(|callable| { - let (self_param, _) = callable.receiver_param(self.sema.db)?; - Some(self.sema.source(self_param)?.value.kind()) - }) - .unwrap_or(ast::SelfParamKind::Owned); - } + { + placeholder_value.autoderef_count = deref_count; + placeholder_value.autoref_kind = self + .sema + .resolve_method_call_as_callable(code) + .and_then(|callable| { + let (self_param, _) = callable.receiver_param(self.sema.db)?; + Some(self.sema.source(self_param)?.value.kind()) + }) + .unwrap_or(ast::SelfParamKind::Owned); } } } else { @@ -698,12 +692,11 @@ impl Phase<'_> { } fn record_ignored_comments(&mut self, token: &SyntaxToken) { - if token.kind() == SyntaxKind::COMMENT { - if let Phase::Second(match_out) = self { - if let Some(comment) = ast::Comment::cast(token.clone()) { - match_out.ignored_comments.push(comment); - } - } + if token.kind() == SyntaxKind::COMMENT + && let Phase::Second(match_out) = self + && let Some(comment) = ast::Comment::cast(token.clone()) + { + match_out.ignored_comments.push(comment); } } } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs index 752edd6535a63..16287a439c358 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs @@ -112,12 +112,12 @@ impl<'db> ReplacementRenderer<'_, 'db> { self.out.push_str(&mod_path.display(self.db, self.edition).to_string()); // Emit everything except for the segment's name-ref, since we already effectively // emitted that as part of `mod_path`. 
- if let Some(path) = ast::Path::cast(node.clone()) { - if let Some(segment) = path.segment() { - for node_or_token in segment.syntax().children_with_tokens() { - if node_or_token.kind() != SyntaxKind::NAME_REF { - self.render_node_or_token(&node_or_token); - } + if let Some(path) = ast::Path::cast(node.clone()) + && let Some(segment) = path.segment() + { + for node_or_token in segment.syntax().children_with_tokens() { + if node_or_token.kind() != SyntaxKind::NAME_REF { + self.render_node_or_token(&node_or_token); } } } @@ -242,15 +242,15 @@ fn token_is_method_call_receiver(token: &SyntaxToken) -> bool { } fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option { - if ast::Expr::can_cast(kind) { - if let Ok(expr) = fragments::expr(code) { - return Some(expr); - } + if ast::Expr::can_cast(kind) + && let Ok(expr) = fragments::expr(code) + { + return Some(expr); } - if ast::Item::can_cast(kind) { - if let Ok(item) = fragments::item(code) { - return Some(item); - } + if ast::Item::can_cast(kind) + && let Ok(item) = fragments::item(code) + { + return Some(item); } None } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs index 8f28a1cd3a623..a4e2cfbaee27d 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs @@ -83,21 +83,17 @@ impl<'db> Resolver<'_, 'db> { let ufcs_function_calls = resolved_paths .iter() .filter_map(|(path_node, resolved)| { - if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) { - if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) { - if let hir::PathResolution::Def(hir::ModuleDef::Function(function)) = - resolved.resolution - { - if function.as_assoc_item(self.resolution_scope.scope.db).is_some() { - let qualifier_type = - self.resolution_scope.qualifier_type(path_node); - return Some(( - grandparent, - UfcsCallInfo { call_expr, function, qualifier_type }, - )); - } - } - } + if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) + && let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) + && let hir::PathResolution::Def(hir::ModuleDef::Function(function)) = + resolved.resolution + && function.as_assoc_item(self.resolution_scope.scope.db).is_some() + { + let qualifier_type = self.resolution_scope.qualifier_type(path_node); + return Some(( + grandparent, + UfcsCallInfo { call_expr, function, qualifier_type }, + )); } None }) @@ -153,12 +149,11 @@ impl<'db> Resolver<'_, 'db> { /// Returns whether `path` contains a placeholder, but ignores any placeholders within type /// arguments. 
fn path_contains_placeholder(&self, path: &ast::Path) -> bool { - if let Some(segment) = path.segment() { - if let Some(name_ref) = segment.name_ref() { - if self.placeholders_by_stand_in.contains_key(name_ref.text().as_str()) { - return true; - } - } + if let Some(segment) = path.segment() + && let Some(name_ref) = segment.name_ref() + && self.placeholders_by_stand_in.contains_key(name_ref.text().as_str()) + { + return true; } if let Some(qualifier) = path.qualifier() { return self.path_contains_placeholder(&qualifier); @@ -252,14 +247,12 @@ impl<'db> ResolutionScope<'db> { fn qualifier_type(&self, path: &SyntaxNode) -> Option> { use syntax::ast::AstNode; - if let Some(path) = ast::Path::cast(path.clone()) { - if let Some(qualifier) = path.qualifier() { - if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) = - self.resolve_path(&qualifier) - { - return Some(adt.ty(self.scope.db)); - } - } + if let Some(path) = ast::Path::cast(path.clone()) + && let Some(qualifier) = path.qualifier() + && let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) = + self.resolve_path(&qualifier) + { + return Some(adt.ty(self.scope.db)); } None } @@ -299,11 +292,11 @@ fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode { /// Returns whether `path` or any of its qualifiers contains type arguments. fn path_contains_type_arguments(path: Option) -> bool { if let Some(path) = path { - if let Some(segment) = path.segment() { - if segment.generic_arg_list().is_some() { - cov_mark::hit!(type_arguments_within_path); - return true; - } + if let Some(segment) = path.segment() + && segment.generic_arg_list().is_some() + { + cov_mark::hit!(type_arguments_within_path); + return true; } return path_contains_type_arguments(path.qualifier()); } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs index 99a98fb2a7130..72f857ceda903 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs @@ -187,16 +187,15 @@ impl<'db> MatchFinder<'db> { self.try_add_match(rule, code, restrict_range, matches_out); // If we've got a macro call, we already tried matching it pre-expansion, which is the only // way to match the whole macro, now try expanding it and matching the expansion. - if let Some(macro_call) = ast::MacroCall::cast(code.clone()) { - if let Some(expanded) = self.sema.expand_macro_call(&macro_call) { - if let Some(tt) = macro_call.token_tree() { - // When matching within a macro expansion, we only want to allow matches of - // nodes that originated entirely from within the token tree of the macro call. - // i.e. we don't want to match something that came from the macro itself. - if let Some(range) = self.sema.original_range_opt(tt.syntax()) { - self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out); - } - } + if let Some(macro_call) = ast::MacroCall::cast(code.clone()) + && let Some(expanded) = self.sema.expand_macro_call(&macro_call) + && let Some(tt) = macro_call.token_tree() + { + // When matching within a macro expansion, we only want to allow matches of + // nodes that originated entirely from within the token tree of the macro call. + // i.e. we don't want to match something that came from the macro itself. 
+ if let Some(range) = self.sema.original_range_opt(tt.syntax()) { + self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out); } } for child in code.children() { @@ -241,10 +240,10 @@ impl<'db> MatchFinder<'db> { /// Returns whether we support matching within `node` and all of its ancestors. fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool { - if let Some(parent) = node.parent() { - if !is_search_permitted_ancestors(&parent) { - return false; - } + if let Some(parent) = node.parent() + && !is_search_permitted_ancestors(&parent) + { + return false; } is_search_permitted(node) } diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs index 05196ac98c03e..dec1889926dad 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs @@ -159,10 +159,10 @@ pub(crate) fn annotations( node.value.syntax().text_range(), Some(name), ); - if res.call_site.0.file_id == source_file_id { - if let Some(name_range) = res.call_site.1 { - return Some((res.call_site.0.range, Some(name_range))); - } + if res.call_site.0.file_id == source_file_id + && let Some(name_range) = res.call_site.1 + { + return Some((res.call_site.0.range, Some(name_range))); } }; // otherwise try upmapping the entire node out of attributes diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index f31886b969766..ad84eacfb3e88 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -96,14 +96,14 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< let (name, expanded, kind) = loop { let node = anc.next()?; - if let Some(item) = ast::Item::cast(node.clone()) { - if let Some(def) = sema.resolve_attr_macro_call(&item) { - break ( - def.name(db).display(db, file_id.edition(db)).to_string(), - expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?, - SyntaxKind::MACRO_ITEMS, - ); - } + if let Some(item) = ast::Item::cast(node.clone()) + && let Some(def) = sema.resolve_attr_macro_call(&item) + { + break ( + def.name(db).display(db, file_id.edition(db)).to_string(), + expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?, + SyntaxKind::MACRO_ITEMS, + ); } if let Some(mac) = ast::MacroCall::cast(node) { let mut name = mac.path()?.segment()?.name_ref()?.to_string(); diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs index a374f9752fcfa..2926384c40786 100644 --- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs +++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs @@ -81,10 +81,10 @@ fn try_extend_selection( if token.text_range() != range { return Some(token.text_range()); } - if let Some(comment) = ast::Comment::cast(token.clone()) { - if let Some(range) = extend_comments(comment) { - return Some(range); - } + if let Some(comment) = ast::Comment::cast(token.clone()) + && let Some(range) = extend_comments(comment) + { + return Some(range); } token.parent()? 
} @@ -92,12 +92,11 @@ fn try_extend_selection( }; // if we are in single token_tree, we maybe live in macro or attr - if node.kind() == TOKEN_TREE { - if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { - if let Some(range) = extend_tokens_from_range(sema, macro_call, range) { - return Some(range); - } - } + if node.kind() == TOKEN_TREE + && let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) + && let Some(range) = extend_tokens_from_range(sema, macro_call, range) + { + return Some(range); } if node.text_range() != range { @@ -106,10 +105,10 @@ fn try_extend_selection( let node = shallowest_node(&node); - if node.parent().is_some_and(|n| list_kinds.contains(&n.kind())) { - if let Some(range) = extend_list_item(&node) { - return Some(range); - } + if node.parent().is_some_and(|n| list_kinds.contains(&n.kind())) + && let Some(range) = extend_list_item(&node) + { + return Some(range); } node.parent().map(|it| it.text_range()) @@ -221,19 +220,20 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start(); let ws_suffix = &ws_text[suffix]; let ws_prefix = &ws_text[prefix]; - if ws_text.contains('\n') && !ws_suffix.contains('\n') { - if let Some(node) = ws.next_sibling_or_token() { - let start = match ws_prefix.rfind('\n') { - Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32), - None => node.text_range().start(), - }; - let end = if root.text().char_at(node.text_range().end()) == Some('\n') { - node.text_range().end() + TextSize::of('\n') - } else { - node.text_range().end() - }; - return TextRange::new(start, end); - } + if ws_text.contains('\n') + && !ws_suffix.contains('\n') + && let Some(node) = ws.next_sibling_or_token() + { + let start = match ws_prefix.rfind('\n') { + Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32), + None => node.text_range().start(), + }; + let end = if root.text().char_at(node.text_range().end()) == Some('\n') { + node.text_range().end() + TextSize::of('\n') + } else { + node.text_range().end() + }; + return TextRange::new(start, end); } ws.text_range() } diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs index 1901bcc797e77..ac64413effebf 100755 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -61,30 +61,29 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { }; if is_multiline { // for the func with multiline param list - if matches!(element.kind(), FN) { - if let NodeOrToken::Node(node) = &element { - if let Some(fn_node) = ast::Fn::cast(node.clone()) { - if !fn_node - .param_list() - .map(|param_list| param_list.syntax().text().contains_char('\n')) - .unwrap_or(false) - { - continue; - } + if matches!(element.kind(), FN) + && let NodeOrToken::Node(node) = &element + && let Some(fn_node) = ast::Fn::cast(node.clone()) + { + if !fn_node + .param_list() + .map(|param_list| param_list.syntax().text().contains_char('\n')) + .unwrap_or(false) + { + continue; + } - if fn_node.body().is_some() { - // Get the actual start of the function (excluding doc comments) - let fn_start = fn_node - .fn_token() - .map(|token| token.text_range().start()) - .unwrap_or(node.text_range().start()); - res.push(Fold { - range: TextRange::new(fn_start, node.text_range().end()), - kind: FoldKind::Function, - }); - continue; - } - } + if 
fn_node.body().is_some() { + // Get the actual start of the function (excluding doc comments) + let fn_start = fn_node + .fn_token() + .map(|token| token.text_range().start()) + .unwrap_or(node.text_range().start()); + res.push(Fold { + range: TextRange::new(fn_start, node.text_range().end()), + kind: FoldKind::Function, + }); + continue; } } res.push(Fold { range: element.text_range(), kind }); @@ -120,14 +119,13 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { match_ast! { match node { ast::Module(module) => { - if module.item_list().is_none() { - if let Some(range) = contiguous_range_for_item_group( + if module.item_list().is_none() + && let Some(range) = contiguous_range_for_item_group( module, &mut visited_nodes, ) { res.push(Fold { range, kind: FoldKind::Modules }) } - } }, ast::Use(use_) => { if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_nodes) { @@ -212,11 +210,11 @@ where for element in first.syntax().siblings_with_tokens(Direction::Next) { let node = match element { NodeOrToken::Token(token) => { - if let Some(ws) = ast::Whitespace::cast(token) { - if !ws.spans_multiple_lines() { - // Ignore whitespace without blank lines - continue; - } + if let Some(ws) = ast::Whitespace::cast(token) + && !ws.spans_multiple_lines() + { + // Ignore whitespace without blank lines + continue; } // There is a blank line or another token, which means that the // group ends here @@ -270,21 +268,21 @@ fn contiguous_range_for_comment( for element in first.syntax().siblings_with_tokens(Direction::Next) { match element { NodeOrToken::Token(token) => { - if let Some(ws) = ast::Whitespace::cast(token.clone()) { - if !ws.spans_multiple_lines() { - // Ignore whitespace without blank lines - continue; - } + if let Some(ws) = ast::Whitespace::cast(token.clone()) + && !ws.spans_multiple_lines() + { + // Ignore whitespace without blank lines + continue; } - if let Some(c) = ast::Comment::cast(token) { - if c.kind() == group_kind { - let text = c.text().trim_start(); - // regions are not real comments - if !(text.starts_with(REGION_START) || text.starts_with(REGION_END)) { - visited.insert(c.clone()); - last = c; - continue; - } + if let Some(c) = ast::Comment::cast(token) + && c.kind() == group_kind + { + let text = c.text().trim_start(); + // regions are not real comments + if !(text.starts_with(REGION_START) || text.starts_with(REGION_END)) { + visited.insert(c.clone()); + last = c; + continue; } } // The comment group ends because either: diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 29fc68bb50f19..84e41277390ff 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -94,18 +94,17 @@ pub(crate) fn goto_definition( let parent = token.value.parent()?; let token_file_id = token.file_id; - if let Some(token) = ast::String::cast(token.value.clone()) { - if let Some(x) = + if let Some(token) = ast::String::cast(token.value.clone()) + && let Some(x) = try_lookup_include_path(sema, InFile::new(token_file_id, token), file_id) - { - return Some(vec![x]); - } + { + return Some(vec![x]); } - if ast::TokenTree::can_cast(parent.kind()) { - if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) { - return Some(vec![x]); - } + if ast::TokenTree::can_cast(parent.kind()) + && let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) + { + return Some(vec![x]); } Some( @@ -245,12 +244,11 @@ fn 
try_lookup_macro_def_in_macro_use( let krate = extern_crate.resolved_crate(sema.db)?; for mod_def in krate.root_module().declarations(sema.db) { - if let ModuleDef::Macro(mac) = mod_def { - if mac.name(sema.db).as_str() == token.text() { - if let Some(nav) = mac.try_to_nav(sema.db) { - return Some(nav.call_site); - } - } + if let ModuleDef::Macro(mac) = mod_def + && mac.name(sema.db).as_str() == token.text() + && let Some(nav) = mac.try_to_nav(sema.db) + { + return Some(nav.call_site); } } diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index 356bd69aa44ea..9960e79a5380f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -722,20 +722,19 @@ impl<'a> WalkExpandedExprCtx<'a> { self.depth += 1; } - if let ast::Expr::MacroExpr(expr) = expr { - if let Some(expanded) = + if let ast::Expr::MacroExpr(expr) = expr + && let Some(expanded) = expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call)) - { - match_ast! { - match (expanded.value) { - ast::MacroStmts(it) => { - self.handle_expanded(it, cb); - }, - ast::Expr(it) => { - self.walk(&it, cb); - }, - _ => {} - } + { + match_ast! { + match (expanded.value) { + ast::MacroStmts(it) => { + self.handle_expanded(it, cb); + }, + ast::Expr(it) => { + self.walk(&it, cb); + }, + _ => {} } } } @@ -755,10 +754,10 @@ impl<'a> WalkExpandedExprCtx<'a> { } for stmt in expanded.statements() { - if let ast::Stmt::ExprStmt(stmt) = stmt { - if let Some(expr) = stmt.expr() { - self.walk(&expr, cb); - } + if let ast::Stmt::ExprStmt(stmt) = stmt + && let Some(expr) = stmt.expr() + { + self.walk(&expr, cb); } } } @@ -806,12 +805,12 @@ pub(crate) fn highlight_unsafe_points( push_to_highlights(unsafe_token_file_id, Some(unsafe_token.text_range())); // highlight unsafe operations - if let Some(block) = block_expr { - if let Some(body) = sema.body_for(InFile::new(unsafe_token_file_id, block.syntax())) { - let unsafe_ops = sema.get_unsafe_ops(body); - for unsafe_op in unsafe_ops { - push_to_highlights(unsafe_op.file_id, Some(unsafe_op.value.text_range())); - } + if let Some(block) = block_expr + && let Some(body) = sema.body_for(InFile::new(unsafe_token_file_id, block.syntax())) + { + let unsafe_ops = sema.get_unsafe_ops(body); + for unsafe_op in unsafe_ops { + push_to_highlights(unsafe_op.file_id, Some(unsafe_op.value.text_range())); } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index e4d6279759ed7..44c98a43f6944 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -244,17 +244,15 @@ fn hover_offset( let node = token.parent()?; // special case macro calls, we wanna render the invoked arm index - if let Some(name) = ast::NameRef::cast(node.clone()) { - if let Some(path_seg) = + if let Some(name) = ast::NameRef::cast(node.clone()) + && let Some(path_seg) = name.syntax().parent().and_then(ast::PathSegment::cast) - { - if let Some(macro_call) = path_seg + && let Some(macro_call) = path_seg .parent_path() .syntax() .parent() .and_then(ast::MacroCall::cast) - { - if let Some(macro_) = sema.resolve_macro_call(¯o_call) { + && let Some(macro_) = sema.resolve_macro_call(¯o_call) { break 'a vec![( (Definition::Macro(macro_), None), sema.resolve_macro_call_arm(¯o_call), @@ -262,9 +260,6 @@ fn hover_offset( node, )]; } - } - } - } match IdentClass::classify_node(sema, 
&node)? { // It's better for us to fall back to the keyword hover here, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 670210d4998dd..51b5900e8155a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -95,23 +95,25 @@ pub(super) fn try_expr( if let Some((hir::Adt::Enum(inner), hir::Adt::Enum(body))) = adts { let famous_defs = FamousDefs(sema, sema.scope(try_expr.syntax())?.krate()); // special case for two options, there is no value in showing them - if let Some(option_enum) = famous_defs.core_option_Option() { - if inner == option_enum && body == option_enum { - cov_mark::hit!(hover_try_expr_opt_opt); - return None; - } + if let Some(option_enum) = famous_defs.core_option_Option() + && inner == option_enum + && body == option_enum + { + cov_mark::hit!(hover_try_expr_opt_opt); + return None; } // special case two results to show the error variants only - if let Some(result_enum) = famous_defs.core_result_Result() { - if inner == result_enum && body == result_enum { - let error_type_args = - inner_ty.type_arguments().nth(1).zip(body_ty.type_arguments().nth(1)); - if let Some((inner, body)) = error_type_args { - inner_ty = inner; - body_ty = body; - "Try Error".clone_into(&mut s); - } + if let Some(result_enum) = famous_defs.core_result_Result() + && inner == result_enum + && body == result_enum + { + let error_type_args = + inner_ty.type_arguments().nth(1).zip(body_ty.type_arguments().nth(1)); + if let Some((inner, body)) = error_type_args { + inner_ty = inner; + body_ty = body; + "Try Error".clone_into(&mut s); } } } @@ -1132,10 +1134,10 @@ fn markup( ) -> (Markup, Option) { let mut buf = String::new(); - if let Some(mod_path) = mod_path { - if !mod_path.is_empty() { - format_to!(buf, "```rust\n{}\n```\n\n", mod_path); - } + if let Some(mod_path) = mod_path + && !mod_path.is_empty() + { + format_to!(buf, "```rust\n{}\n```\n\n", mod_path); } format_to!(buf, "```rust\n{}\n```", rust); @@ -1217,55 +1219,55 @@ fn render_memory_layout( format_to!(label, ", "); } - if let Some(render) = config.offset { - if let Some(offset) = offset(&layout) { - format_to!(label, "offset = "); - match render { - MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{offset}"), - MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{offset:#X}"), - MemoryLayoutHoverRenderKind::Both if offset >= 10 => { - format_to!(label, "{offset} ({offset:#X})") - } - MemoryLayoutHoverRenderKind::Both => { - format_to!(label, "{offset}") - } + if let Some(render) = config.offset + && let Some(offset) = offset(&layout) + { + format_to!(label, "offset = "); + match render { + MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{offset}"), + MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{offset:#X}"), + MemoryLayoutHoverRenderKind::Both if offset >= 10 => { + format_to!(label, "{offset} ({offset:#X})") + } + MemoryLayoutHoverRenderKind::Both => { + format_to!(label, "{offset}") } - format_to!(label, ", "); } + format_to!(label, ", "); } - if let Some(render) = config.padding { - if let Some((padding_name, padding)) = padding(&layout) { - format_to!(label, "{padding_name} = "); - match render { - MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{padding}"), - MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{padding:#X}"), - MemoryLayoutHoverRenderKind::Both if padding >= 10 => { - format_to!(label, "{padding} 
({padding:#X})") - } - MemoryLayoutHoverRenderKind::Both => { - format_to!(label, "{padding}") - } + if let Some(render) = config.padding + && let Some((padding_name, padding)) = padding(&layout) + { + format_to!(label, "{padding_name} = "); + match render { + MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{padding}"), + MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{padding:#X}"), + MemoryLayoutHoverRenderKind::Both if padding >= 10 => { + format_to!(label, "{padding} ({padding:#X})") + } + MemoryLayoutHoverRenderKind::Both => { + format_to!(label, "{padding}") } - format_to!(label, ", "); } + format_to!(label, ", "); } - if config.niches { - if let Some(niches) = layout.niches() { - if niches > 1024 { - if niches.is_power_of_two() { - format_to!(label, "niches = 2{}, ", pwr2_to_exponent(niches)); - } else if is_pwr2plus1(niches) { - format_to!(label, "niches = 2{} + 1, ", pwr2_to_exponent(niches - 1)); - } else if is_pwr2minus1(niches) { - format_to!(label, "niches = 2{} - 1, ", pwr2_to_exponent(niches + 1)); - } else { - format_to!(label, "niches = a lot, "); - } + if config.niches + && let Some(niches) = layout.niches() + { + if niches > 1024 { + if niches.is_power_of_two() { + format_to!(label, "niches = 2{}, ", pwr2_to_exponent(niches)); + } else if is_pwr2plus1(niches) { + format_to!(label, "niches = 2{} + 1, ", pwr2_to_exponent(niches - 1)); + } else if is_pwr2minus1(niches) { + format_to!(label, "niches = 2{} - 1, ", pwr2_to_exponent(niches + 1)); } else { - format_to!(label, "niches = {niches}, "); + format_to!(label, "niches = a lot, "); } + } else { + format_to!(label, "niches = {niches}, "); } } label.pop(); // ' ' diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 19e5509681aad..7a8514c47af95 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -576,13 +576,13 @@ impl InlayHintLabel { } pub fn append_part(&mut self, part: InlayHintLabelPart) { - if part.linked_location.is_none() && part.tooltip.is_none() { - if let Some(InlayHintLabelPart { text, linked_location: None, tooltip: None }) = + if part.linked_location.is_none() + && part.tooltip.is_none() + && let Some(InlayHintLabelPart { text, linked_location: None, tooltip: None }) = self.parts.last_mut() - { - text.push_str(&part.text); - return; - } + { + text.push_str(&part.text); + return; } self.parts.push(part); } @@ -1065,4 +1065,34 @@ fn bar() { "#, ); } + + #[test] + fn regression_20239() { + check_with_config( + InlayHintsConfig { parameter_hints: true, type_hints: true, ..DISABLED_CONFIG }, + r#" +//- minicore: fn +trait Iterator { + type Item; + fn map B>(self, f: F); +} +trait ToString { + fn to_string(&self); +} + +fn check_tostr_eq(left: L, right: R) +where + L: Iterator, + L::Item: ToString, + R: Iterator, + R::Item: ToString, +{ + left.map(|s| s.to_string()); + // ^ impl ToString + right.map(|s| s.to_string()); + // ^ impl ToString +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 49b43fc37f24a..4d020bac3aad4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -39,10 +39,10 @@ pub(super) fn hints( if let ast::Expr::ParenExpr(_) = expr { return None; } - if let ast::Expr::BlockExpr(b) = expr { - if 
!b.is_standalone() { - return None; - } + if let ast::Expr::BlockExpr(b) = expr + && !b.is_standalone() + { + return None; } let descended = sema.descend_node_into_attributes(expr.clone()).pop(); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 729349365e6c8..922e9598aa017 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -41,13 +41,11 @@ pub(super) fn hints( Some(it.colon_token()) }, ast::LetStmt(it) => { - if config.hide_closure_initialization_hints { - if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { - if closure_has_block_body(&closure) { + if config.hide_closure_initialization_hints + && let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() + && closure_has_block_body(&closure) { return None; } - } - } if it.ty().is_some() { return None; } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index ff157fa171b50..a8bb652fda226 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -51,12 +51,11 @@ pub(super) fn hints( if ty.is_unknown() { return None; } - if matches!(expr, ast::Expr::PathExpr(_)) { - if let Some(hir::Adt::Struct(st)) = ty.as_adt() { - if st.fields(sema.db).is_empty() { - return None; - } - } + if matches!(expr, ast::Expr::PathExpr(_)) + && let Some(hir::Adt::Struct(st)) = ty.as_adt() + && st.fields(sema.db).is_empty() + { + return None; } let label = label_of_ty(famous_defs, config, &ty, display_target)?; acc.push(InlayHint { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index 05253b6794891..e80c9dc9d4732 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -120,11 +120,11 @@ pub(super) fn hints( }; if let Some(mut next) = closing_token.next_token() { - if next.kind() == T![;] { - if let Some(tok) = next.next_token() { - closing_token = next; - next = tok; - } + if next.kind() == T![;] + && let Some(tok) = next.next_token() + { + closing_token = next; + next = tok; } if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) { // Only display the hint if the `}` is the last token on the line diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs index 9e600b5455be2..fef1cb83c1195 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs @@ -55,11 +55,9 @@ pub(super) fn hints( // Insert braces if necessary let insert_braces = |builder: &mut TextEditBuilder| { - if !has_block_body { - if let Some(range) = closure.body().map(|b| b.syntax().text_range()) { - builder.insert(range.start(), "{ ".to_owned()); - builder.insert(range.end(), " }".to_owned()); - } + if !has_block_body && let Some(range) = closure.body().map(|b| b.syntax().text_range()) { + builder.insert(range.start(), "{ ".to_owned()); + builder.insert(range.end(), " }".to_owned()); } }; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs 
b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs index 88152bf3e3883..491018a4dda84 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs @@ -81,10 +81,10 @@ fn item_hint( text_edit: Some(config.lazy_text_edit(|| { let mut builder = TextEdit::builder(); builder.insert(token.text_range().start(), "unsafe ".to_owned()); - if extern_block.unsafe_token().is_none() { - if let Some(abi) = extern_block.abi() { - builder.insert(abi.syntax().text_range().start(), "unsafe ".to_owned()); - } + if extern_block.unsafe_token().is_none() + && let Some(abi) = extern_block.abi() + { + builder.insert(abi.syntax().text_range().start(), "unsafe ".to_owned()); } builder.finish() })), diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs index 6e1b3bdbdf039..1fddb6fbe01d1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs @@ -33,10 +33,10 @@ pub(crate) fn hints( let mut args = generic_arg_list.generic_args().peekable(); let start_with_lifetime = matches!(args.peek()?, ast::GenericArg::LifetimeArg(_)); let params = generic_def.params(sema.db).into_iter().filter(|p| { - if let hir::GenericParam::TypeParam(it) = p { - if it.is_implicit(sema.db) { - return false; - } + if let hir::GenericParam::TypeParam(it) = p + && it.is_implicit(sema.db) + { + return false; } if !start_with_lifetime { return !matches!(p, hir::GenericParam::LifetimeParam(_)); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs index 7212efd954e88..bddce904dfdea 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs @@ -22,30 +22,31 @@ pub(super) fn hints( return None; } - if let Either::Right(it) = &statik_or_const { - if ast::AssocItemList::can_cast( + if let Either::Right(it) = &statik_or_const + && ast::AssocItemList::can_cast( it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()), - ) { - return None; - } + ) + { + return None; } - if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) { - if ty.lifetime().is_none() { - let t = ty.amp_token()?; - acc.push(InlayHint { - range: t.text_range(), - kind: InlayKind::Lifetime, - label: "'static".into(), - text_edit: Some(config.lazy_text_edit(|| { - TextEdit::insert(t.text_range().start(), "'static ".into()) - })), - position: InlayHintPosition::After, - pad_left: false, - pad_right: true, - resolve_parent: None, - }); - } + if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) + && ty.lifetime().is_none() + { + let t = ty.amp_token()?; + acc.push(InlayHint { + range: t.text_range(), + kind: InlayKind::Lifetime, + label: "'static".into(), + text_edit: Some( + config + .lazy_text_edit(|| TextEdit::insert(t.text_range().start(), "'static ".into())), + ), + position: InlayHintPosition::After, + pad_left: false, + pad_right: true, + resolve_parent: None, + }); } Some(()) diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs index 49fec0a793c3a..a89c53e00b3b0 100644 --- 
a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs @@ -324,35 +324,35 @@ fn hints_( // apply hints // apply output if required - if let (Some(output_lt), Some(r)) = (&output, ret_type) { - if let Some(ty) = r.ty() { - walk_ty(&ty, &mut |ty| match ty { - ast::Type::RefType(ty) if ty.lifetime().is_none() => { - if let Some(amp) = ty.amp_token() { - is_trivial = false; - acc.push(mk_lt_hint(amp, output_lt.to_string())); - } - false + if let (Some(output_lt), Some(r)) = (&output, ret_type) + && let Some(ty) = r.ty() + { + walk_ty(&ty, &mut |ty| match ty { + ast::Type::RefType(ty) if ty.lifetime().is_none() => { + if let Some(amp) = ty.amp_token() { + is_trivial = false; + acc.push(mk_lt_hint(amp, output_lt.to_string())); } - ast::Type::FnPtrType(_) => { + false + } + ast::Type::FnPtrType(_) => { + is_trivial = false; + true + } + ast::Type::PathType(t) => { + if t.path() + .and_then(|it| it.segment()) + .and_then(|it| it.parenthesized_arg_list()) + .is_some() + { is_trivial = false; true + } else { + false } - ast::Type::PathType(t) => { - if t.path() - .and_then(|it| it.segment()) - .and_then(|it| it.parenthesized_arg_list()) - .is_some() - { - is_trivial = false; - true - } else { - false - } - } - _ => false, - }) - } + } + _ => false, + }) } if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 5174228466c08..ec0a4c46c7fec 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -135,10 +135,10 @@ fn should_hide_param_name_hint( } if unary_function { - if let Some(function_name) = function_name { - if is_param_name_suffix_of_fn_name(param_name, function_name) { - return true; - } + if let Some(function_name) = function_name + && is_param_name_suffix_of_fn_name(param_name, function_name) + { + return true; } if is_obvious_param(param_name) { return true; diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs index 0188c105faa78..a946559c35455 100644 --- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs +++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs @@ -144,15 +144,15 @@ fn remove_newline( } } - if config.join_else_if { - if let (Some(prev), Some(_next)) = (as_if_expr(&prev), as_if_expr(&next)) { - match prev.else_token() { - Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else), - None => { - cov_mark::hit!(join_two_ifs); - edit.replace(token.text_range(), " else ".to_owned()); - return; - } + if config.join_else_if + && let (Some(prev), Some(_next)) = (as_if_expr(&prev), as_if_expr(&next)) + { + match prev.else_token() { + Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else), + None => { + cov_mark::hit!(join_two_ifs); + edit.replace(token.text_range(), " else ".to_owned()); + return; } } } @@ -213,10 +213,10 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Op let mut buf = expr.syntax().text().to_string(); // Match block needs to have a comma after the block - if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) { - if match_arm.comma_token().is_none() { - buf.push(','); - } + if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) + && 
match_arm.comma_token().is_none() + { + buf.push(','); } edit.replace(block_range, buf); diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index b3b8deb61fc0e..98877482ed863 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -299,7 +299,7 @@ impl Analysis { /// Gets the text of the source file. pub fn file_text(&self, file_id: FileId) -> Cancellable> { - self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db)) + self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db).clone()) } /// Gets the syntax tree of the file. diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs index 50219cee57db4..96d829d1260bd 100644 --- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs +++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs @@ -29,14 +29,13 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec(source_file.syntax(), position.offset); // If cursor is literally on `mod foo`, go to the grandpa. - if let Some(m) = &module { - if !m + if let Some(m) = &module + && !m .item_list() .is_some_and(|it| it.syntax().text_range().contains_inclusive(position.offset)) - { - cov_mark::hit!(test_resolve_parent_module_on_module_decl); - module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); - } + { + cov_mark::hit!(test_resolve_parent_module_on_module_decl); + module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); } match module { diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index a07c647c2cb83..aea4ae0fd9702 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -13,8 +13,11 @@ use ide_db::{ }; use itertools::Itertools; use std::fmt::Write; -use stdx::{always, never}; -use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast}; +use stdx::{always, format_to, never}; +use syntax::{ + AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, + ast::{self, HasArgList, prec::ExprPrecedence}, +}; use ide_db::text_edit::TextEdit; @@ -35,13 +38,8 @@ pub(crate) fn prepare_rename( let syntax = source_file.syntax(); let res = find_definitions(&sema, syntax, position, &Name::new_symbol_root(sym::underscore))? - .map(|(frange, kind, def, _, _)| { - // ensure all ranges are valid - - if def.range_for_rename(&sema).is_none() { - bail!("No references found at position") - } - + .filter(|(_, _, def, _, _)| def.range_for_rename(&sema).is_some()) + .map(|(frange, kind, _, _, _)| { always!( frange.range.contains_inclusive(position.offset) && frange.file_id == position.file_id @@ -336,6 +334,85 @@ fn find_definitions( } } +fn transform_assoc_fn_into_method_call( + sema: &Semantics<'_, RootDatabase>, + source_change: &mut SourceChange, + f: hir::Function, +) { + let calls = Definition::Function(f).usages(sema).all(); + for (file_id, calls) in calls { + for call in calls { + let Some(fn_name) = call.name.as_name_ref() else { continue }; + let Some(path) = fn_name.syntax().parent().and_then(ast::PathSegment::cast) else { + continue; + }; + let path = path.parent_path(); + // The `PathExpr` is the direct parent, above it is the `CallExpr`. 
+ let Some(call) = + path.syntax().parent().and_then(|it| ast::CallExpr::cast(it.parent()?)) + else { + continue; + }; + + let Some(arg_list) = call.arg_list() else { continue }; + let mut args = arg_list.args(); + let Some(mut self_arg) = args.next() else { continue }; + let second_arg = args.next(); + + // Strip (de)references, as they will be taken automatically by auto(de)ref. + loop { + let self_ = match &self_arg { + ast::Expr::RefExpr(self_) => self_.expr(), + ast::Expr::ParenExpr(self_) => self_.expr(), + ast::Expr::PrefixExpr(self_) + if self_.op_kind() == Some(ast::UnaryOp::Deref) => + { + self_.expr() + } + _ => break, + }; + self_arg = match self_ { + Some(it) => it, + None => break, + }; + } + + let self_needs_parens = + self_arg.precedence().needs_parentheses_in(ExprPrecedence::Postfix); + + let replace_start = path.syntax().text_range().start(); + let replace_end = match second_arg { + Some(second_arg) => second_arg.syntax().text_range().start(), + None => arg_list + .r_paren_token() + .map(|it| it.text_range().start()) + .unwrap_or_else(|| arg_list.syntax().text_range().end()), + }; + let replace_range = TextRange::new(replace_start, replace_end); + + let Some(macro_mapped_self) = sema.original_range_opt(self_arg.syntax()) else { + continue; + }; + let mut replacement = String::new(); + if self_needs_parens { + replacement.push('('); + } + replacement.push_str(macro_mapped_self.text(sema.db)); + if self_needs_parens { + replacement.push(')'); + } + replacement.push('.'); + format_to!(replacement, "{fn_name}"); + replacement.push('('); + + source_change.insert_source_edit( + file_id.file_id(sema.db), + TextEdit::replace(replace_range, replacement), + ); + } + } +} + fn rename_to_self( sema: &Semantics<'_, RootDatabase>, local: hir::Local, @@ -413,6 +490,7 @@ fn rename_to_self( file_id.original_file(sema.db).file_id(sema.db), TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)), ); + transform_assoc_fn_into_method_call(sema, &mut source_change, fn_def); Ok(source_change) } @@ -499,10 +577,10 @@ mod tests { ) { let ra_fixture_after = &trim_indent(ra_fixture_after); let (analysis, position) = fixture::position(ra_fixture_before); - if !ra_fixture_after.starts_with("error: ") { - if let Err(err) = analysis.prepare_rename(position).unwrap() { - panic!("Prepare rename to '{new_name}' was failed: {err}") - } + if !ra_fixture_after.starts_with("error: ") + && let Err(err) = analysis.prepare_rename(position).unwrap() + { + panic!("Prepare rename to '{new_name}' was failed: {err}") } let rename_result = analysis .rename(position, new_name) @@ -3417,4 +3495,78 @@ fn other_place() { Quux::Bar$0; } "#, ); } + + #[test] + fn rename_to_self_callers() { + check( + "self", + r#" +//- minicore: add +struct Foo; +impl core::ops::Add for Foo { + type Target = Foo; + fn add(self, _: Self) -> Foo { Foo } +} + +impl Foo { + fn foo(th$0is: &Self) {} +} + +fn bar(v: &Foo) { + Foo::foo(v); +} + +fn baz() { + Foo::foo(&Foo); + Foo::foo(Foo + Foo); +} + "#, + r#" +struct Foo; +impl core::ops::Add for Foo { + type Target = Foo; + fn add(self, _: Self) -> Foo { Foo } +} + +impl Foo { + fn foo(&self) {} +} + +fn bar(v: &Foo) { + v.foo(); +} + +fn baz() { + Foo.foo(); + (Foo + Foo).foo(); +} + "#, + ); + // Multiple arguments: + check( + "self", + r#" +struct Foo; + +impl Foo { + fn foo(th$0is: &Self, v: i32) {} +} + +fn bar(v: Foo) { + Foo::foo(&v, 123); +} + "#, + r#" +struct Foo; + +impl Foo { + fn foo(&self, v: i32) {} +} + +fn bar(v: Foo) { + v.foo(123); +} + "#, + ); + } } 
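The rename-to-self change above, like most hunks in this patch, is written with Rust 2024 let-chains: several `let` bindings and boolean guards joined by `&&` under a single `if`, replacing the nested `if let` pyramids being deleted. A minimal, self-contained sketch of the pattern for readers unfamiliar with it (the function and variable names below are illustrative only and do not come from rust-analyzer):

// Nested form, as removed throughout this patch.
fn first_alphabetic_nested(input: Option<&str>) -> Option<char> {
    if let Some(text) = input {
        if let Some(first) = text.chars().next() {
            if first.is_alphabetic() {
                return Some(first);
            }
        }
    }
    None
}

// Let-chain form, as added throughout this patch: the bindings and the guard
// share one `if`, so there is a single indentation level and a single fall-through.
fn first_alphabetic_chained(input: Option<&str>) -> Option<char> {
    if let Some(text) = input
        && let Some(first) = text.chars().next()
        && first.is_alphabetic()
    {
        return Some(first);
    }
    None
}

The two forms are equivalent at runtime; the patch only changes their shape, which is why the surrounding context lines in each hunk stay untouched.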
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 9d1a5bae96fbc..83e5c5ab1dfeb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -514,20 +514,19 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { .flat_map(|it| it.name(db)) .for_each(|name| format_to!(path, "{}::", name.display(db, edition))); // This probably belongs to canonical_path? - if let Some(assoc_item) = def.as_assoc_item(db) { - if let Some(ty) = assoc_item.implementing_ty(db) { - if let Some(adt) = ty.as_adt() { - let name = adt.name(db); - let mut ty_args = ty.generic_parameters(db, display_target).peekable(); - format_to!(path, "{}", name.display(db, edition)); - if ty_args.peek().is_some() { - format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); - } - format_to!(path, "::{}", def_name.display(db, edition)); - path.retain(|c| c != ' '); - return Some(path); - } + if let Some(assoc_item) = def.as_assoc_item(db) + && let Some(ty) = assoc_item.implementing_ty(db) + && let Some(adt) = ty.as_adt() + { + let name = adt.name(db); + let mut ty_args = ty.generic_parameters(db, display_target).peekable(); + format_to!(path, "{}", name.display(db, edition)); + if ty_args.peek().is_some() { + format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); } + format_to!(path, "::{}", def_name.display(db, edition)); + path.retain(|c| c != ' '); + return Some(path); } format_to!(path, "{}", def_name.display(db, edition)); Some(path) @@ -697,14 +696,13 @@ impl UpdateTest { continue; }; for item in items { - if let hir::ItemInNs::Macros(makro) = item { - if Definition::Macro(makro) + if let hir::ItemInNs::Macros(makro) = item + && Definition::Macro(makro) .usages(sema) .in_scope(&search_scope) .at_least_one() - { - return true; - } + { + return true; } } } diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index e30a3ebefb98c..382573b680113 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -146,12 +146,11 @@ pub(crate) fn signature_help( // Stop at multi-line expressions, since the signature of the outer call is not very // helpful inside them. 
- if let Some(expr) = ast::Expr::cast(node.clone()) { - if !matches!(expr, ast::Expr::RecordExpr(..)) - && expr.syntax().text().contains_char('\n') - { - break; - } + if let Some(expr) = ast::Expr::cast(node.clone()) + && !matches!(expr, ast::Expr::RecordExpr(..)) + && expr.syntax().text().contains_char('\n') + { + break; } } @@ -366,10 +365,10 @@ fn signature_help_for_generics( res.signature.push('<'); let mut buf = String::new(); for param in params { - if let hir::GenericParam::TypeParam(ty) = param { - if ty.is_implicit(db) { - continue; - } + if let hir::GenericParam::TypeParam(ty) = param + && ty.is_implicit(db) + { + continue; } buf.clear(); diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index efee39c13db94..694ac22e1993b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -133,10 +133,10 @@ fn get_definitions( ) -> Option> { for token in sema.descend_into_macros_exact(token) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); - if let Some(defs) = def { - if !defs.is_empty() { - return Some(defs); - } + if let Some(defs) = def + && !defs.is_empty() + { + return Some(defs); } } None diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 87db0cd7dc53c..8bde8fd970063 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -306,12 +306,12 @@ fn highlight_name_ref( }; let mut h = match name_class { NameRefClass::Definition(def, _) => { - if let Definition::Local(local) = &def { - if let Some(bindings_shadow_count) = bindings_shadow_count { - let name = local.name(sema.db); - let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } + if let Definition::Local(local) = &def + && let Some(bindings_shadow_count) = bindings_shadow_count + { + let name = local.name(sema.db); + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) }; let mut h = highlight_def(sema, krate, def, edition, true); @@ -437,21 +437,21 @@ fn highlight_name( edition: Edition, ) -> Highlight { let name_kind = NameClass::classify(sema, &name); - if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind { - if let Some(bindings_shadow_count) = bindings_shadow_count { - let name = local.name(sema.db); - let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - *shadow_count += 1; - *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } + if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind + && let Some(bindings_shadow_count) = bindings_shadow_count + { + let name = local.name(sema.db); + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *shadow_count += 1; + *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) }; match name_kind { Some(NameClass::Definition(def)) => { let mut h = highlight_def(sema, krate, def, edition, false) | HlMod::Definition; - if let Definition::Trait(trait_) = &def { - if trait_.is_unsafe(sema.db) { - h |= HlMod::Unsafe; - } + if let Definition::Trait(trait_) = &def + && trait_.is_unsafe(sema.db) + { + h |= HlMod::Unsafe; 
} h } @@ -743,10 +743,9 @@ fn highlight_method_call( hir::Access::Owned => { if let Some(receiver_ty) = method_call.receiver().and_then(|it| sema.type_of_expr(&it)) + && !receiver_ty.adjusted().is_copy(sema.db) { - if !receiver_ty.adjusted().is_copy(sema.db) { - h |= HlMod::Consuming - } + h |= HlMod::Consuming } } } diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 26ee698af0812..ad838a6550eca 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -475,10 +475,10 @@ fn load_crate_graph_into_db( } let changes = vfs.take_changes(); for (_, file) in changes { - if let vfs::Change::Create(v, _) | vfs::Change::Modify(v, _) = file.change { - if let Ok(text) = String::from_utf8(v) { - analysis_change.change_file(file.file_id, Some(text)) - } + if let vfs::Change::Create(v, _) | vfs::Change::Modify(v, _) = file.change + && let Ok(text) = String::from_utf8(v) + { + analysis_change.change_file(file.file_id, Some(text)) } } let source_roots = source_root_config.partition(vfs); @@ -533,7 +533,7 @@ impl ProcMacroExpander for Expander { current_dir, ) { Ok(Ok(subtree)) => Ok(subtree), - Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), + Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err)), Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), } } diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index 04ac85ad43ddf..b185556b5c7b7 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -185,24 +185,22 @@ fn invocation_fixtures( for it in tokens.iter() { collect_from_op(it, builder, seed); } - if i + 1 != cnt { - if let Some(sep) = separator { - match &**sep { - Separator::Literal(it) => { - builder.push(tt::Leaf::Literal(it.clone())) + if i + 1 != cnt + && let Some(sep) = separator + { + match &**sep { + Separator::Literal(it) => builder.push(tt::Leaf::Literal(it.clone())), + Separator::Ident(it) => builder.push(tt::Leaf::Ident(it.clone())), + Separator::Puncts(puncts) => { + for it in puncts { + builder.push(tt::Leaf::Punct(*it)) } - Separator::Ident(it) => builder.push(tt::Leaf::Ident(it.clone())), - Separator::Puncts(puncts) => { - for it in puncts { - builder.push(tt::Leaf::Punct(*it)) - } - } - Separator::Lifetime(punct, ident) => { - builder.push(tt::Leaf::Punct(*punct)); - builder.push(tt::Leaf::Ident(ident.clone())); - } - }; - } + } + Separator::Lifetime(punct, ident) => { + builder.push(tt::Leaf::Punct(*punct)); + builder.push(tt::Leaf::Ident(ident.clone())); + } + }; } } } diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index a8d5965d480c2..189efcd15c2f7 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -475,12 +475,12 @@ fn match_loop_inner<'t>( }) } OpDelimited::Op(Op::Subtree { tokens, delimiter }) => { - if let Ok((subtree, _)) = src.clone().expect_subtree() { - if subtree.delimiter.kind == delimiter.kind { - item.stack.push(item.dot); - item.dot = tokens.iter_delimited_with(*delimiter); - cur_items.push(item); - } + if let Ok((subtree, _)) = src.clone().expect_subtree() + && subtree.delimiter.kind == delimiter.kind + { + item.stack.push(item.dot); + item.dot = tokens.iter_delimited_with(*delimiter); + 
cur_items.push(item); } } OpDelimited::Op(Op::Var { kind, name, .. }) => { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs index 2b4151e3b752d..41fd72d8d5a2f 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs @@ -77,38 +77,38 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) { return; } - if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) { - if !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) { - // test no_semi_after_block - // fn foo() { - // if true {} - // loop {} - // match () {} - // while true {} - // for _ in () {} - // {} - // {} - // macro_rules! test { - // () => {} - // } - // test!{} - // } - let m = cm.precede(p); - match semicolon { - Semicolon::Required => { - if blocklike.is_block() { - p.eat(T![;]); - } else { - p.expect(T![;]); - } - } - Semicolon::Optional => { + if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) + && !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) + { + // test no_semi_after_block + // fn foo() { + // if true {} + // loop {} + // match () {} + // while true {} + // for _ in () {} + // {} + // {} + // macro_rules! test { + // () => {} + // } + // test!{} + // } + let m = cm.precede(p); + match semicolon { + Semicolon::Required => { + if blocklike.is_block() { p.eat(T![;]); + } else { + p.expect(T![;]); } - Semicolon::Forbidden => (), } - m.complete(p, EXPR_STMT); + Semicolon::Optional => { + p.eat(T![;]); + } + Semicolon::Forbidden => (), } + m.complete(p, EXPR_STMT); } } @@ -134,14 +134,11 @@ pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) { if p.at(T![else]) { // test_err let_else_right_curly_brace // fn func() { let Some(_) = {Some(1)} else { panic!("h") };} - if let Some(expr) = expr_after_eq { - if let Some(token) = expr.last_token(p) { - if token == T!['}'] { - p.error( - "right curly brace `}` before `else` in a `let...else` statement not allowed" - ) - } - } + if let Some(expr) = expr_after_eq + && let Some(token) = expr.last_token(p) + && token == T!['}'] + { + p.error("right curly brace `}` before `else` in a `let...else` statement not allowed") } // test let_else diff --git a/src/tools/rust-analyzer/crates/parser/src/input.rs b/src/tools/rust-analyzer/crates/parser/src/input.rs index 4490956f97046..331bc58dd0523 100644 --- a/src/tools/rust-analyzer/crates/parser/src/input.rs +++ b/src/tools/rust-analyzer/crates/parser/src/input.rs @@ -61,7 +61,7 @@ impl Input { #[inline] fn push_impl(&mut self, kind: SyntaxKind, contextual_kind: SyntaxKind) { let idx = self.len(); - if idx % (bits::BITS as usize) == 0 { + if idx.is_multiple_of(bits::BITS as usize) { self.joint.push(0); } self.kind.push(kind); diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs index e2baec890c3a6..d5e513933f7a0 100644 --- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs +++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs @@ -252,10 +252,10 @@ fn n_attached_trivias<'a>( WHITESPACE if text.contains("\n\n") => { // we check whether the next token is a doc-comment // and skip the whitespace in this case - if let Some((COMMENT, peek_text)) = trivias.peek().map(|(_, pair)| pair) { - if is_outer(peek_text) { - continue; - } + if let Some((COMMENT, peek_text)) = trivias.peek().map(|(_, pair)| pair) + && is_outer(peek_text) + { + 
continue; } break; } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs new file mode 100644 index 0000000000000..ee96b899fe57f --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs @@ -0,0 +1,172 @@ +//! The initial proc-macro-srv protocol, soon to be deprecated. + +pub mod json; +pub mod msg; + +use std::{ + io::{BufRead, Write}, + sync::Arc, +}; + +use paths::AbsPath; +use span::Span; + +use crate::{ + ProcMacro, ProcMacroKind, ServerError, + legacy_protocol::{ + json::{read_json, write_json}, + msg::{ + ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, Message, Request, Response, + ServerConfig, SpanDataIndexMap, deserialize_span_data_index_map, + flat::serialize_span_data_index_map, + }, + }, + process::ProcMacroServerProcess, + version, +}; + +pub(crate) use crate::legacy_protocol::msg::SpanMode; + +/// Legacy span type, only defined here as it is still used by the proc-macro server. +/// While rust-analyzer doesn't use this anymore at all, RustRover relies on the legacy type for +/// proc-macro expansion. +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct SpanId(pub u32); + +impl std::fmt::Debug for SpanId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +pub(crate) fn version_check(srv: &ProcMacroServerProcess) -> Result { + let request = Request::ApiVersionCheck {}; + let response = send_task(srv, request)?; + + match response { + Response::ApiVersionCheck(version) => Ok(version), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + } +} + +/// Enable support for rust-analyzer span mode if the server supports it. +pub(crate) fn enable_rust_analyzer_spans( + srv: &ProcMacroServerProcess, +) -> Result { + let request = Request::SetConfig(ServerConfig { span_mode: SpanMode::RustAnalyzer }); + let response = send_task(srv, request)?; + + match response { + Response::SetConfig(ServerConfig { span_mode }) => Ok(span_mode), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + } +} + +/// Finds proc-macros in a given dynamic library. 
+pub(crate) fn find_proc_macros( + srv: &ProcMacroServerProcess, + dylib_path: &AbsPath, +) -> Result, String>, ServerError> { + let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() }; + + let response = send_task(srv, request)?; + + match response { + Response::ListMacros(it) => Ok(it), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + } +} + +pub(crate) fn expand( + proc_macro: &ProcMacro, + subtree: tt::SubtreeView<'_, Span>, + attr: Option>, + env: Vec<(String, String)>, + def_site: Span, + call_site: Span, + mixed_site: Span, + current_dir: String, +) -> Result>, String>, crate::ServerError> +{ + let version = proc_macro.process.version(); + let mut span_data_table = SpanDataIndexMap::default(); + let def_site = span_data_table.insert_full(def_site).0; + let call_site = span_data_table.insert_full(call_site).0; + let mixed_site = span_data_table.insert_full(mixed_site).0; + let task = ExpandMacro { + data: ExpandMacroData { + macro_body: FlatTree::new(subtree, version, &mut span_data_table), + macro_name: proc_macro.name.to_string(), + attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), + has_global_spans: ExpnGlobals { + serialize: version >= version::HAS_GLOBAL_SPANS, + def_site, + call_site, + mixed_site, + }, + span_data_table: if proc_macro.process.rust_analyzer_spans() { + serialize_span_data_index_map(&span_data_table) + } else { + Vec::new() + }, + }, + lib: proc_macro.dylib_path.to_path_buf().into(), + env, + current_dir: Some(current_dir), + }; + + let response = send_task(&proc_macro.process, Request::ExpandMacro(Box::new(task)))?; + + match response { + Response::ExpandMacro(it) => Ok(it + .map(|tree| { + let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table); + if proc_macro.needs_fixup_change() { + proc_macro.change_fixup_to_match_old_server(&mut expanded); + } + expanded + }) + .map_err(|msg| msg.0)), + Response::ExpandMacroExtended(it) => Ok(it + .map(|resp| { + let mut expanded = FlatTree::to_subtree_resolved( + resp.tree, + version, + &deserialize_span_data_index_map(&resp.span_data_table), + ); + if proc_macro.needs_fixup_change() { + proc_macro.change_fixup_to_match_old_server(&mut expanded); + } + expanded + }) + .map_err(|msg| msg.0)), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + } +} + +/// Sends a request to the proc-macro server and waits for a response. +fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result { + if let Some(server_error) = srv.exited() { + return Err(server_error.clone()); + } + + srv.send_task(send_request, req) +} + +/// Sends a request to the server and reads the response. 
+fn send_request(
+    mut writer: &mut dyn Write,
+    mut reader: &mut dyn BufRead,
+    req: Request,
+    buf: &mut String,
+) -> Result<Response, ServerError> {
+    req.write(write_json, &mut writer).map_err(|err| ServerError {
+        message: "failed to write request".into(),
+        io: Some(Arc::new(err)),
+    })?;
+    let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError {
+        message: "failed to read response".into(),
+        io: Some(Arc::new(err)),
+    })?;
+    Ok(res)
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
index 165936269d35d..b795c45589564 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -1,5 +1,6 @@
 //! Defines messages for cross-process message passing based on `ndjson` wire protocol
 pub(crate) mod flat;
+pub use self::flat::*;
 
 use std::io::{self, BufRead, Write};
 
@@ -9,24 +10,6 @@ use serde_derive::{Deserialize, Serialize};
 
 use crate::ProcMacroKind;
 
-pub use self::flat::{
-    FlatTree, SpanDataIndexMap, deserialize_span_data_index_map, serialize_span_data_index_map,
-};
-pub use span::TokenId;
-
-// The versions of the server protocol
-pub const NO_VERSION_CHECK_VERSION: u32 = 0;
-pub const VERSION_CHECK_VERSION: u32 = 1;
-pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
-pub const HAS_GLOBAL_SPANS: u32 = 3;
-pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4;
-/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field.
-pub const EXTENDED_LEAF_DATA: u32 = 5;
-pub const HASHED_AST_ID: u32 = 6;
-
-/// Current API version of the proc-macro protocol.
-pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID;
-
 /// Represents requests sent from the client to the proc-macro-srv.
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Request {
@@ -48,7 +31,7 @@ pub enum Request {
 }
 
 /// Defines the mode used for handling span data.
-#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
+#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
 pub enum SpanMode {
     /// Default mode, where spans are identified by an ID.
#[default] @@ -210,6 +193,8 @@ mod tests { TopSubtreeBuilder, }; + use crate::version; + use super::*; fn fixture_token_tree() -> TopSubtree { @@ -308,7 +293,7 @@ mod tests { #[test] fn test_proc_macro_rpc_works() { let tt = fixture_token_tree(); - for v in RUST_ANALYZER_SPAN_SUPPORT..=CURRENT_API_VERSION { + for v in version::RUST_ANALYZER_SPAN_SUPPORT..=version::CURRENT_API_VERSION { let mut span_data_table = Default::default(); let task = ExpandMacro { data: ExpandMacroData { diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs index 597ffa05d203e..fb3542d24f460 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs @@ -40,9 +40,12 @@ use std::collections::VecDeque; use intern::Symbol; use rustc_hash::FxHashMap; use serde_derive::{Deserialize, Serialize}; -use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange, TokenId}; +use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange}; -use crate::legacy_protocol::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}; +use crate::{ + legacy_protocol::SpanId, + version::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}, +}; pub type SpanDataIndexMap = indexmap::IndexSet>; @@ -62,7 +65,7 @@ pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec { } pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { - debug_assert!(map.len() % 5 == 0); + debug_assert!(map.len().is_multiple_of(5)); map.chunks_exact(5) .map(|span| { let &[file_id, ast_id, start, end, e] = span else { unreachable!() }; @@ -91,27 +94,27 @@ pub struct FlatTree { } struct SubtreeRepr { - open: TokenId, - close: TokenId, + open: SpanId, + close: SpanId, kind: tt::DelimiterKind, tt: [u32; 2], } struct LiteralRepr { - id: TokenId, + id: SpanId, text: u32, suffix: u32, kind: u16, } struct PunctRepr { - id: TokenId, + id: SpanId, char: char, spacing: tt::Spacing, } struct IdentRepr { - id: TokenId, + id: SpanId, text: u32, is_raw: bool, } @@ -122,7 +125,7 @@ impl FlatTree { version: u32, span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { - let mut w = Writer { + let mut w = Writer:: { string_table: FxHashMap::default(), work: VecDeque::new(), span_data_table, @@ -159,8 +162,11 @@ impl FlatTree { } } - pub fn new_raw(subtree: tt::SubtreeView<'_, TokenId>, version: u32) -> FlatTree { - let mut w = Writer { + pub fn new_raw>( + subtree: tt::SubtreeView<'_, T::Span>, + version: u32, + ) -> FlatTree { + let mut w = Writer:: { string_table: FxHashMap::default(), work: VecDeque::new(), span_data_table: &mut (), @@ -202,7 +208,7 @@ impl FlatTree { version: u32, span_data_table: &SpanDataIndexMap, ) -> tt::TopSubtree { - Reader { + Reader:: { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { read_vec(self.subtree, SubtreeRepr::read_with_close_span) } else { @@ -227,8 +233,11 @@ impl FlatTree { .read() } - pub fn to_subtree_unresolved(self, version: u32) -> tt::TopSubtree { - Reader { + pub fn to_subtree_unresolved>( + self, + version: u32, + ) -> tt::TopSubtree { + Reader:: { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { read_vec(self.subtree, SubtreeRepr::read_with_close_span) } else { @@ -283,7 +292,7 @@ impl SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, 
tt: [lo, len] } + SubtreeRepr { open: SpanId(open), close: SpanId(!0), kind, tt: [lo, len] } } fn write_with_close_span(self) -> [u32; 5] { let kind = match self.kind { @@ -302,7 +311,7 @@ impl SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] } + SubtreeRepr { open: SpanId(open), close: SpanId(close), kind, tt: [lo, len] } } } @@ -311,13 +320,13 @@ impl LiteralRepr { [self.id.0, self.text] } fn read([id, text]: [u32; 2]) -> LiteralRepr { - LiteralRepr { id: TokenId(id), text, kind: 0, suffix: !0 } + LiteralRepr { id: SpanId(id), text, kind: 0, suffix: !0 } } fn write_with_kind(self) -> [u32; 4] { [self.id.0, self.text, self.kind as u32, self.suffix] } fn read_with_kind([id, text, kind, suffix]: [u32; 4]) -> LiteralRepr { - LiteralRepr { id: TokenId(id), text, kind: kind as u16, suffix } + LiteralRepr { id: SpanId(id), text, kind: kind as u16, suffix } } } @@ -335,7 +344,7 @@ impl PunctRepr { 1 => tt::Spacing::Joint, other => panic!("bad spacing {other}"), }; - PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing } + PunctRepr { id: SpanId(id), char: char.try_into().unwrap(), spacing } } } @@ -344,44 +353,46 @@ impl IdentRepr { [self.id.0, self.text] } fn read(data: [u32; 2]) -> IdentRepr { - IdentRepr { id: TokenId(data[0]), text: data[1], is_raw: false } + IdentRepr { id: SpanId(data[0]), text: data[1], is_raw: false } } fn write_with_rawness(self) -> [u32; 3] { [self.id.0, self.text, self.is_raw as u32] } fn read_with_rawness([id, text, is_raw]: [u32; 3]) -> IdentRepr { - IdentRepr { id: TokenId(id), text, is_raw: is_raw == 1 } + IdentRepr { id: SpanId(id), text, is_raw: is_raw == 1 } } } -trait InternableSpan: Copy { +pub trait SpanTransformer { type Table; - fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId; - fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self; + type Span: Copy; + fn token_id_of(table: &mut Self::Table, s: Self::Span) -> SpanId; + fn span_for_token_id(table: &Self::Table, id: SpanId) -> Self::Span; } - -impl InternableSpan for TokenId { +impl SpanTransformer for SpanId { type Table = (); - fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId { + type Span = Self; + fn token_id_of((): &mut Self::Table, token_id: Self::Span) -> SpanId { token_id } - fn span_for_token_id((): &Self::Table, id: TokenId) -> Self { + fn span_for_token_id((): &Self::Table, id: SpanId) -> Self::Span { id } } -impl InternableSpan for Span { +impl SpanTransformer for Span { type Table = SpanDataIndexMap; - fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { - TokenId(table.insert_full(span).0 as u32) + type Span = Self; + fn token_id_of(table: &mut Self::Table, span: Self::Span) -> SpanId { + SpanId(table.insert_full(span).0 as u32) } - fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self { + fn span_for_token_id(table: &Self::Table, id: SpanId) -> Self::Span { *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0]) } } -struct Writer<'a, 'span, S: InternableSpan> { - work: VecDeque<(usize, tt::iter::TtIter<'a, S>)>, +struct Writer<'a, 'span, S: SpanTransformer> { + work: VecDeque<(usize, tt::iter::TtIter<'a, S::Span>)>, string_table: FxHashMap, u32>, span_data_table: &'span mut S::Table, version: u32, @@ -394,8 +405,8 @@ struct Writer<'a, 'span, S: InternableSpan> { text: Vec, } -impl<'a, S: InternableSpan> Writer<'a, '_, S> { - fn write(&mut self, root: tt::SubtreeView<'a, S>) { +impl<'a, T: 
SpanTransformer> Writer<'a, '_, T> { + fn write(&mut self, root: tt::SubtreeView<'a, T::Span>) { let subtree = root.top_subtree(); self.enqueue(subtree, root.iter()); while let Some((idx, subtree)) = self.work.pop_front() { @@ -403,11 +414,11 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> { } } - fn token_id_of(&mut self, span: S) -> TokenId { - S::token_id_of(self.span_data_table, span) + fn token_id_of(&mut self, span: T::Span) -> SpanId { + T::token_id_of(self.span_data_table, span) } - fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, S>) { + fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, T::Span>) { let mut first_tt = self.token_tree.len(); let n_tt = subtree.clone().count(); // FIXME: `count()` walks over the entire iterator. self.token_tree.resize(first_tt + n_tt, !0); @@ -478,7 +489,11 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> { } } - fn enqueue(&mut self, subtree: &'a tt::Subtree, contents: tt::iter::TtIter<'a, S>) -> u32 { + fn enqueue( + &mut self, + subtree: &'a tt::Subtree, + contents: tt::iter::TtIter<'a, T::Span>, + ) -> u32 { let idx = self.subtree.len(); let open = self.token_id_of(subtree.delimiter.open); let close = self.token_id_of(subtree.delimiter.close); @@ -507,7 +522,7 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> { } } -struct Reader<'span, S: InternableSpan> { +struct Reader<'span, S: SpanTransformer> { version: u32, subtree: Vec, literal: Vec, @@ -518,11 +533,11 @@ struct Reader<'span, S: InternableSpan> { span_data_table: &'span S::Table, } -impl Reader<'_, S> { - pub(crate) fn read(self) -> tt::TopSubtree { - let mut res: Vec, Vec>)>> = +impl Reader<'_, T> { + pub(crate) fn read(self) -> tt::TopSubtree { + let mut res: Vec, Vec>)>> = vec![None; self.subtree.len()]; - let read_span = |id| S::span_for_token_id(self.span_data_table, id); + let read_span = |id| T::span_for_token_id(self.span_data_table, id); for i in (0..self.subtree.len()).rev() { let repr = &self.subtree[i]; let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize]; diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 516c7418bde8f..97919b85b5130 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -5,24 +5,29 @@ //! is used to provide basic infrastructure for communication between two //! processes: Client (RA itself), Server (the external program) -pub mod legacy_protocol { - pub mod json; - pub mod msg; -} +pub mod legacy_protocol; mod process; use paths::{AbsPath, AbsPathBuf}; use span::{ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, Span}; use std::{fmt, io, sync::Arc, time::SystemTime}; -use crate::{ - legacy_protocol::msg::{ - ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, HAS_GLOBAL_SPANS, HASHED_AST_ID, - PanicMessage, RUST_ANALYZER_SPAN_SUPPORT, Request, Response, SpanDataIndexMap, - deserialize_span_data_index_map, flat::serialize_span_data_index_map, - }, - process::ProcMacroServerProcess, -}; +use crate::process::ProcMacroServerProcess; + +/// The versions of the server protocol +pub mod version { + pub const NO_VERSION_CHECK_VERSION: u32 = 0; + pub const VERSION_CHECK_VERSION: u32 = 1; + pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; + pub const HAS_GLOBAL_SPANS: u32 = 3; + pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4; + /// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field. 
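
The flat.rs changes keep the span table as a flat `Vec<u32>`, five words per span, which is what `serialize_span_data_index_map`/`deserialize_span_data_index_map` and the new `is_multiple_of` assertion operate on. A simplified round-trip of that layout, with plain tuples standing in for real span data (`usize::is_multiple_of` needs Rust 1.87+):

    // Five u32 values per span (file id, ast id, range start, range end,
    // syntax context in the real table); decoding walks fixed-size chunks.
    type FlatSpan = (u32, u32, u32, u32, u32);

    fn serialize(spans: &[FlatSpan]) -> Vec<u32> {
        spans.iter().flat_map(|&(a, b, c, d, e)| [a, b, c, d, e]).collect()
    }

    fn deserialize(buf: &[u32]) -> Vec<FlatSpan> {
        // `buf.len() % 5 == 0` is the portable spelling of the same check.
        debug_assert!(buf.len().is_multiple_of(5));
        buf.chunks_exact(5)
            .map(|chunk| {
                let &[a, b, c, d, e] = chunk else { unreachable!() };
                (a, b, c, d, e)
            })
            .collect()
    }

    fn main() {
        let spans = vec![(1, 0, 10, 20, 0), (1, 0, 21, 30, 0)];
        assert_eq!(deserialize(&serialize(&spans)), spans);
    }
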
+ pub const EXTENDED_LEAF_DATA: u32 = 5; + pub const HASHED_AST_ID: u32 = 6; + + /// Current API version of the proc-macro protocol. + pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID; +} /// Represents different kinds of procedural macros that can be expanded by the external server. #[derive(Copy, Clone, Eq, PartialEq, Debug, serde_derive::Serialize, serde_derive::Deserialize)] @@ -163,7 +168,7 @@ impl ProcMacro { fn needs_fixup_change(&self) -> bool { let version = self.process.version(); - (RUST_ANALYZER_SPAN_SUPPORT..HASHED_AST_ID).contains(&version) + (version::RUST_ANALYZER_SPAN_SUPPORT..version::HASHED_AST_ID).contains(&version) } /// On some server versions, the fixup ast id is different than ours. So change it to match. @@ -204,7 +209,7 @@ impl ProcMacro { call_site: Span, mixed_site: Span, current_dir: String, - ) -> Result, PanicMessage>, ServerError> { + ) -> Result, String>, ServerError> { let (mut subtree, mut attr) = (subtree, attr); let (mut subtree_changed, mut attr_changed); if self.needs_fixup_change() { @@ -219,57 +224,15 @@ impl ProcMacro { } } - let version = self.process.version(); - - let mut span_data_table = SpanDataIndexMap::default(); - let def_site = span_data_table.insert_full(def_site).0; - let call_site = span_data_table.insert_full(call_site).0; - let mixed_site = span_data_table.insert_full(mixed_site).0; - let task = ExpandMacro { - data: ExpandMacroData { - macro_body: FlatTree::new(subtree, version, &mut span_data_table), - macro_name: self.name.to_string(), - attributes: attr - .map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), - has_global_spans: ExpnGlobals { - serialize: version >= HAS_GLOBAL_SPANS, - def_site, - call_site, - mixed_site, - }, - span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { - serialize_span_data_index_map(&span_data_table) - } else { - Vec::new() - }, - }, - lib: self.dylib_path.to_path_buf().into(), + legacy_protocol::expand( + self, + subtree, + attr, env, - current_dir: Some(current_dir), - }; - - let response = self.process.send_task(Request::ExpandMacro(Box::new(task)))?; - - match response { - Response::ExpandMacro(it) => Ok(it.map(|tree| { - let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table); - if self.needs_fixup_change() { - self.change_fixup_to_match_old_server(&mut expanded); - } - expanded - })), - Response::ExpandMacroExtended(it) => Ok(it.map(|resp| { - let mut expanded = FlatTree::to_subtree_resolved( - resp.tree, - version, - &deserialize_span_data_index_map(&resp.span_data_table), - ); - if self.needs_fixup_change() { - self.change_fixup_to_match_old_server(&mut expanded); - } - expanded - })), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), - } + def_site, + call_site, + mixed_site, + current_dir, + ) } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index fcea75ef672a1..fe274a027a80f 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -12,13 +12,8 @@ use stdx::JodChild; use crate::{ ProcMacroKind, ServerError, - legacy_protocol::{ - json::{read_json, write_json}, - msg::{ - CURRENT_API_VERSION, Message, RUST_ANALYZER_SPAN_SUPPORT, Request, Response, - ServerConfig, SpanMode, - }, - }, + legacy_protocol::{self, SpanMode}, + version, }; /// Represents a process handling proc-macro communication. 
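
The constants in the new `version` module drive feature gating: capabilities are switched on with `>=` comparisons against the negotiated server version, and `needs_fixup_change` uses a half-open range for servers that still use the old fixup ast id. A self-contained sketch of that negotiation logic (constants copied from the module above; the `Capabilities` struct is made up for illustration):

    const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
    const HAS_GLOBAL_SPANS: u32 = 3;
    const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4;
    const HASHED_AST_ID: u32 = 6;
    const CURRENT_API_VERSION: u32 = HASHED_AST_ID;

    #[derive(Debug)]
    struct Capabilities {
        close_spans: bool,
        global_spans: bool,
        ra_spans: bool,
        // Mirrors `needs_fixup_change`: only a half-open range of server
        // versions uses the old fixup ast id and needs patching on the client.
        needs_fixup_change: bool,
    }

    fn negotiate(server_version: u32) -> Result<Capabilities, String> {
        if server_version > CURRENT_API_VERSION {
            return Err(format!(
                "server speaks protocol {server_version}, client only supports {CURRENT_API_VERSION}"
            ));
        }
        Ok(Capabilities {
            close_spans: server_version >= ENCODE_CLOSE_SPAN_VERSION,
            global_spans: server_version >= HAS_GLOBAL_SPANS,
            ra_spans: server_version >= RUST_ANALYZER_SPAN_SUPPORT,
            needs_fixup_change: (RUST_ANALYZER_SPAN_SUPPORT..HASHED_AST_ID).contains(&server_version),
        })
    }

    fn main() {
        println!("{:?}", negotiate(4));
        println!("{:?}", negotiate(7));
    }
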
@@ -28,11 +23,16 @@ pub(crate) struct ProcMacroServerProcess { /// hence the lock on the state. state: Mutex, version: u32, - mode: SpanMode, + protocol: Protocol, /// Populated when the server exits. exited: OnceLock>, } +#[derive(Debug)] +enum Protocol { + LegacyJson { mode: SpanMode }, +} + /// Maintains the state of the proc-macro server process. #[derive(Debug)] struct ProcessSrvState { @@ -56,34 +56,40 @@ impl ProcMacroServerProcess { io::Result::Ok(ProcMacroServerProcess { state: Mutex::new(ProcessSrvState { process, stdin, stdout }), version: 0, - mode: SpanMode::Id, + protocol: Protocol::LegacyJson { mode: SpanMode::Id }, exited: OnceLock::new(), }) }; let mut srv = create_srv()?; tracing::info!("sending proc-macro server version check"); match srv.version_check() { - Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::other( - format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}). - This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain." - ), - )), + Ok(v) if v > version::CURRENT_API_VERSION => { + #[allow(clippy::disallowed_methods)] + let process_version = Command::new(process_path) + .arg("--version") + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned()) + .unwrap_or_else(|_| "unknown version".to_owned()); + Err(io::Error::other(format!( + "Your installed proc-macro server is too new for your rust-analyzer. API version: {}, server version: {process_version}. \ + This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain.", + version::CURRENT_API_VERSION + ))) + } Ok(v) => { tracing::info!("Proc-macro server version: {v}"); srv.version = v; - if srv.version >= RUST_ANALYZER_SPAN_SUPPORT { - if let Ok(mode) = srv.enable_rust_analyzer_spans() { - srv.mode = mode; - } + if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT + && let Ok(mode) = srv.enable_rust_analyzer_spans() + { + srv.protocol = Protocol::LegacyJson { mode }; } - tracing::info!("Proc-macro server span mode: {:?}", srv.mode); + tracing::info!("Proc-macro server protocol: {:?}", srv.protocol); Ok(srv) } Err(e) => { tracing::info!(%e, "proc-macro version check failed"); - Err( - io::Error::other(format!("proc-macro server version check failed: {e}")), - ) + Err(io::Error::other(format!("proc-macro server version check failed: {e}"))) } } } @@ -98,25 +104,24 @@ impl ProcMacroServerProcess { self.version } + /// Enable support for rust-analyzer span mode if the server supports it. + pub(crate) fn rust_analyzer_spans(&self) -> bool { + match self.protocol { + Protocol::LegacyJson { mode } => mode == SpanMode::RustAnalyzer, + } + } + /// Checks the API version of the running proc-macro server. fn version_check(&self) -> Result { - let request = Request::ApiVersionCheck {}; - let response = self.send_task(request)?; - - match response { - Response::ApiVersionCheck(version) => Ok(version), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + match self.protocol { + Protocol::LegacyJson { .. } => legacy_protocol::version_check(self), } } /// Enable support for rust-analyzer span mode if the server supports it. 
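
When the server reports a protocol version newer than the client supports, the error message now also shells out to the server binary with `--version` so the mismatch is easier to diagnose. The probe is best-effort; a standalone sketch of the same idea, using `rustc` as a stand-in binary:

    use std::process::Command;

    // Best-effort version probe: never fail the caller just because the probe failed.
    fn probe_version(binary: &str) -> String {
        Command::new(binary)
            .arg("--version")
            .output()
            .ok()
            .filter(|output| output.status.success())
            .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned())
            .unwrap_or_else(|| "unknown version".to_owned())
    }

    fn main() {
        // `rustc` stands in for the proc-macro server binary here.
        println!("{}", probe_version("rustc"));
        println!("{}", probe_version("definitely-not-installed"));
    }
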
fn enable_rust_analyzer_spans(&self) -> Result { - let request = Request::SetConfig(ServerConfig { span_mode: SpanMode::RustAnalyzer }); - let response = self.send_task(request)?; - - match response { - Response::SetConfig(ServerConfig { span_mode }) => Ok(span_mode), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + match self.protocol { + Protocol::LegacyJson { .. } => legacy_protocol::enable_rust_analyzer_spans(self), } } @@ -125,25 +130,24 @@ impl ProcMacroServerProcess { &self, dylib_path: &AbsPath, ) -> Result, String>, ServerError> { - let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() }; - - let response = self.send_task(request)?; - - match response { - Response::ListMacros(it) => Ok(it), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + match self.protocol { + Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path), } } - /// Sends a request to the proc-macro server and waits for a response. - pub(crate) fn send_task(&self, req: Request) -> Result { - if let Some(server_error) = self.exited.get() { - return Err(server_error.0.clone()); - } - + pub(crate) fn send_task( + &self, + serialize_req: impl FnOnce( + &mut dyn Write, + &mut dyn BufRead, + Request, + &mut String, + ) -> Result, ServerError>, + req: Request, + ) -> Result { let state = &mut *self.state.lock().unwrap(); let mut buf = String::new(); - send_request(&mut state.stdin, &mut state.stdout, req, &mut buf) + serialize_req(&mut state.stdin, &mut state.stdout, req, &mut buf) .and_then(|res| { res.ok_or_else(|| { let message = "proc-macro server did not respond with data".to_owned(); @@ -162,10 +166,10 @@ impl ProcMacroServerProcess { Ok(None) | Err(_) => e, Ok(Some(status)) => { let mut msg = String::new(); - if !status.success() { - if let Some(stderr) = state.process.child.stderr.as_mut() { - _ = stderr.read_to_string(&mut msg); - } + if !status.success() + && let Some(stderr) = state.process.child.stderr.as_mut() + { + _ = stderr.read_to_string(&mut msg); } let server_error = ServerError { message: format!( @@ -242,21 +246,3 @@ fn mk_child<'a>( } cmd.spawn() } - -/// Sends a request to the server and reads the response. 
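
`send_task` now receives the serialization routine as a parameter instead of hard-coding the JSON codec, which keeps the process plumbing (locking stdin/stdout, handling a missing reply) independent of the wire format and leaves room for the postcard protocol added elsewhere in this patch. A reduced sketch of that shape, with `String` standing in for the real request/response types:

    use std::io::{self, BufRead, Write};

    // `String` stands in for the real `Request`/`Response` types.
    type Req = String;
    type Resp = String;

    // The transport owns the stdin/stdout pair; the caller supplies the codec.
    fn send_task(
        stdin: &mut dyn Write,
        stdout: &mut dyn BufRead,
        codec: impl FnOnce(&mut dyn Write, &mut dyn BufRead, Req) -> io::Result<Option<Resp>>,
        req: Req,
    ) -> Result<Resp, String> {
        codec(stdin, stdout, req)
            .map_err(|err| format!("failed to talk to the server: {err}"))?
            .ok_or_else(|| "server did not respond with data".to_owned())
    }

    // One possible codec: the line-oriented exchange used by the JSON protocol.
    fn line_codec(w: &mut dyn Write, r: &mut dyn BufRead, req: Req) -> io::Result<Option<Resp>> {
        writeln!(w, "{req}")?;
        let mut line = String::new();
        Ok((r.read_line(&mut line)? != 0).then(|| line.trim_end().to_owned()))
    }

    fn main() {
        let mut fake_server_stdout = b"pong\n".as_slice();
        let mut fake_server_stdin = Vec::new();
        let reply = send_task(
            &mut fake_server_stdin,
            &mut fake_server_stdout,
            line_codec,
            "ping".into(),
        );
        println!("{reply:?}");
    }
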
-fn send_request( - mut writer: &mut impl Write, - mut reader: &mut impl BufRead, - req: Request, - buf: &mut String, -) -> Result, ServerError> { - req.write(write_json, &mut writer).map_err(|err| ServerError { - message: "failed to write request".into(), - io: Some(Arc::new(err)), - })?; - let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError { - message: "failed to read response".into(), - io: Some(Arc::new(err)), - })?; - Ok(res) -} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index ab421021b8bfd..91e9e62b084b4 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -14,10 +14,14 @@ publish = false proc-macro-srv.workspace = true proc-macro-api.workspace = true tt.workspace = true +clap = {version = "4.5.42", default-features = false, features = ["std"]} +postcard = { version = "1.1.3", optional = true } [features] +default = ["postcard"] sysroot-abi = ["proc-macro-srv/sysroot-abi"] in-rust-tree = ["proc-macro-srv/in-rust-tree", "sysroot-abi"] +postcard = ["dep:postcard"] [[bin]] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs index 07f914fece0e2..12e7c8b05bac3 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs @@ -1,5 +1,49 @@ -//! This teaches cargo about our cfg(rust_analyzer) +//! Construct version in the `commit-hash date channel` format + +use std::{env, path::PathBuf, process::Command}; fn main() { - println!("cargo:rustc-check-cfg=cfg(rust_analyzer)"); + set_rerun(); + set_commit_info(); + println!("cargo::rustc-check-cfg=cfg(rust_analyzer)"); +} + +fn set_rerun() { + println!("cargo:rerun-if-env-changed=CFG_RELEASE"); + + let mut manifest_dir = PathBuf::from( + env::var("CARGO_MANIFEST_DIR").expect("`CARGO_MANIFEST_DIR` is always set by cargo."), + ); + + while manifest_dir.parent().is_some() { + let head_ref = manifest_dir.join(".git/HEAD"); + if head_ref.exists() { + println!("cargo:rerun-if-changed={}", head_ref.display()); + return; + } + + manifest_dir.pop(); + } + + println!("cargo:warning=Could not find `.git/HEAD` from manifest dir!"); +} + +fn set_commit_info() { + #[allow(clippy::disallowed_methods)] + let output = match Command::new("git") + .arg("log") + .arg("-1") + .arg("--date=short") + .arg("--format=%H %h %cd") + .output() + { + Ok(output) if output.status.success() => output, + _ => return, + }; + let stdout = String::from_utf8(output.stdout).unwrap(); + let mut parts = stdout.split_whitespace(); + let mut next = || parts.next().unwrap(); + println!("cargo:rustc-env=RA_COMMIT_HASH={}", next()); + println!("cargo:rustc-env=RA_COMMIT_SHORT_HASH={}", next()); + println!("cargo:rustc-env=RA_COMMIT_DATE={}", next()) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index c47ed053254bf..662d34865effe 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -2,13 +2,16 @@ //! 
Driver for proc macro server #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))] -#![allow(clippy::print_stderr)] +#![allow(clippy::print_stdout, clippy::print_stderr)] #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +mod version; + #[cfg(any(feature = "sysroot-abi", rust_analyzer))] mod main_loop; +use clap::{Command, ValueEnum}; #[cfg(any(feature = "sysroot-abi", rust_analyzer))] use main_loop::run; @@ -23,12 +26,59 @@ fn main() -> std::io::Result<()> { ); std::process::exit(122); } + let matches = Command::new("proc-macro-srv") + .args(&[ + clap::Arg::new("format") + .long("format") + .action(clap::ArgAction::Set) + .default_value("json") + .value_parser(clap::builder::EnumValueParser::::new()), + clap::Arg::new("version") + .long("version") + .action(clap::ArgAction::SetTrue) + .help("Prints the version of the proc-macro-srv"), + ]) + .get_matches(); + if matches.get_flag("version") { + println!("rust-analyzer-proc-macro-srv {}", version::version()); + return Ok(()); + } + let &format = + matches.get_one::("format").expect("format value should always be present"); + run(format) +} + +#[derive(Copy, Clone)] +enum ProtocolFormat { + Json, + #[cfg(feature = "postcard")] + Postcard, +} - run() +impl ValueEnum for ProtocolFormat { + fn value_variants<'a>() -> &'a [Self] { + &[ProtocolFormat::Json] + } + + fn to_possible_value(&self) -> Option { + match self { + ProtocolFormat::Json => Some(clap::builder::PossibleValue::new("json")), + #[cfg(feature = "postcard")] + ProtocolFormat::Postcard => Some(clap::builder::PossibleValue::new("postcard")), + } + } + fn from_str(input: &str, _ignore_case: bool) -> Result { + match input { + "json" => Ok(ProtocolFormat::Json), + #[cfg(feature = "postcard")] + "postcard" => Ok(ProtocolFormat::Postcard), + _ => Err(format!("unknown protocol format: {input}")), + } + } } #[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))] -fn run() -> std::io::Result<()> { +fn run(_: ProtocolFormat) -> std::io::Result<()> { Err(std::io::Error::new( std::io::ErrorKind::Unsupported, "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function" diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index f54dff1f2d822..703bc965db25c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -1,16 +1,48 @@ //! The main loop of the proc-macro server. 
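
The server binary now parses `--format` (defaulting to `json`) and `--version` with clap 4, built with `default-features = false, features = ["std"]` as pinned in the Cargo.toml hunk above. A rough sketch of how those flags behave, using a plain possible-values parser instead of the patch's `EnumValueParser` and exercising it through `try_get_matches_from`:

    // Cargo.toml (illustrative): clap = { version = "4", default-features = false, features = ["std"] }
    use clap::{Arg, ArgAction, Command};

    fn cli() -> Command {
        Command::new("proc-macro-srv").args([
            Arg::new("format")
                .long("format")
                .action(ArgAction::Set)
                .default_value("json")
                .value_parser(["json", "postcard"]),
            Arg::new("version")
                .long("version")
                .action(ArgAction::SetTrue),
        ])
    }

    fn main() {
        // `try_get_matches_from` exercises the parser without touching the real argv.
        let m = cli()
            .try_get_matches_from(["proc-macro-srv", "--format", "postcard"])
            .unwrap();
        assert_eq!(m.get_one::<String>("format").map(String::as_str), Some("postcard"));
        assert!(!m.get_flag("version"));

        let m = cli().try_get_matches_from(["proc-macro-srv", "--version"]).unwrap();
        assert!(m.get_flag("version"));
    }

In the patch itself, `--version` short-circuits before the main loop and prints the build-time version string assembled in the new `version.rs` module further down.
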
use std::io; -use proc_macro_api::legacy_protocol::{ - json::{read_json, write_json}, - msg::{ - self, CURRENT_API_VERSION, ExpandMacroData, ExpnGlobals, Message, SpanMode, TokenId, - deserialize_span_data_index_map, serialize_span_data_index_map, +use proc_macro_api::{ + legacy_protocol::{ + json::{read_json, write_json}, + msg::{ + self, ExpandMacroData, ExpnGlobals, Message, SpanMode, SpanTransformer, + deserialize_span_data_index_map, serialize_span_data_index_map, + }, }, + version::CURRENT_API_VERSION, }; -use proc_macro_srv::EnvSnapshot; +use proc_macro_srv::{EnvSnapshot, SpanId}; -pub(crate) fn run() -> io::Result<()> { +use crate::ProtocolFormat; + +struct SpanTrans; + +impl SpanTransformer for SpanTrans { + type Table = (); + type Span = SpanId; + fn token_id_of( + _: &mut Self::Table, + span: Self::Span, + ) -> proc_macro_api::legacy_protocol::SpanId { + proc_macro_api::legacy_protocol::SpanId(span.0) + } + fn span_for_token_id( + _: &Self::Table, + id: proc_macro_api::legacy_protocol::SpanId, + ) -> Self::Span { + SpanId(id.0) + } +} + +pub(crate) fn run(format: ProtocolFormat) -> io::Result<()> { + match format { + ProtocolFormat::Json => run_json(), + #[cfg(feature = "postcard")] + ProtocolFormat::Postcard => unimplemented!(), + } +} + +fn run_json() -> io::Result<()> { fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind { match kind { proc_macro_srv::ProcMacroKind::CustomDerive => { @@ -54,19 +86,20 @@ pub(crate) fn run() -> io::Result<()> { } = *task; match span_mode { SpanMode::Id => msg::Response::ExpandMacro({ - let def_site = TokenId(def_site as u32); - let call_site = TokenId(call_site as u32); - let mixed_site = TokenId(mixed_site as u32); + let def_site = SpanId(def_site as u32); + let call_site = SpanId(call_site as u32); + let mixed_site = SpanId(mixed_site as u32); - let macro_body = macro_body.to_subtree_unresolved(CURRENT_API_VERSION); - let attributes = - attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); + let macro_body = + macro_body.to_subtree_unresolved::(CURRENT_API_VERSION); + let attributes = attributes + .map(|it| it.to_subtree_unresolved::(CURRENT_API_VERSION)); srv.expand( lib, - env, + &env, current_dir, - macro_name, + ¯o_name, macro_body, attributes, def_site, @@ -74,8 +107,12 @@ pub(crate) fn run() -> io::Result<()> { mixed_site, ) .map(|it| { - msg::FlatTree::new_raw(tt::SubtreeView::new(&it), CURRENT_API_VERSION) + msg::FlatTree::new_raw::( + tt::SubtreeView::new(&it), + CURRENT_API_VERSION, + ) }) + .map_err(|e| e.into_string().unwrap_or_default()) .map_err(msg::PanicMessage) }), SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended({ @@ -92,9 +129,9 @@ pub(crate) fn run() -> io::Result<()> { }); srv.expand( lib, - env, + &env, current_dir, - macro_name, + ¯o_name, macro_body, attributes, def_site, @@ -115,6 +152,7 @@ pub(crate) fn run() -> io::Result<()> { tree, span_data_table, }) + .map_err(|e| e.into_string().unwrap_or_default()) .map_err(msg::PanicMessage) }), } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/version.rs new file mode 100644 index 0000000000000..32499d055d1e0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/version.rs @@ -0,0 +1,58 @@ +//! Code for representing rust-analyzer's release version number. +#![expect(dead_code)] + +use std::fmt; + +/// Information about the git repository where rust-analyzer was built from. 
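
With `span::TokenId` gone from the wire types, the CLI's `SpanTrans` bridges two structurally identical but deliberately distinct newtypes: `proc_macro_srv::SpanId` on the server side and `proc_macro_api::legacy_protocol::SpanId` on the protocol side. A toy version of that bridging pattern (the module names below are stand-ins, not the real crates):

    // Two crates that do not share a span type: each side gets its own u32 newtype.
    mod wire {
        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
        pub struct SpanId(pub u32);
    }
    mod server {
        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
        pub struct SpanId(pub u32);
    }

    // The transformer lives in the binary that links both sides and converts at the edge.
    struct SpanTrans;

    impl SpanTrans {
        fn to_wire(span: server::SpanId) -> wire::SpanId {
            wire::SpanId(span.0)
        }
        fn from_wire(id: wire::SpanId) -> server::SpanId {
            server::SpanId(id.0)
        }
    }

    fn main() {
        let s = server::SpanId(42);
        assert_eq!(SpanTrans::from_wire(SpanTrans::to_wire(s)), s);
        println!("round-tripped {s:?}");
    }
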
+pub(crate) struct CommitInfo { + pub(crate) short_commit_hash: &'static str, + pub(crate) commit_hash: &'static str, + pub(crate) commit_date: &'static str, +} + +/// Cargo's version. +pub(crate) struct VersionInfo { + /// rust-analyzer's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc. + pub(crate) version: &'static str, + /// The release channel we were built for (stable/beta/nightly/dev). + /// + /// `None` if not built via bootstrap. + pub(crate) release_channel: Option<&'static str>, + /// Information about the Git repository we may have been built from. + /// + /// `None` if not built from a git repo. + pub(crate) commit_info: Option, +} + +impl fmt::Display for VersionInfo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.version)?; + + if let Some(ci) = &self.commit_info { + write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?; + }; + Ok(()) + } +} + +/// Returns information about cargo's version. +pub(crate) const fn version() -> VersionInfo { + let version = match option_env!("CFG_RELEASE") { + Some(x) => x, + None => "0.0.0", + }; + + let release_channel = option_env!("CFG_RELEASE_CHANNEL"); + let commit_info = match ( + option_env!("RA_COMMIT_SHORT_HASH"), + option_env!("RA_COMMIT_HASH"), + option_env!("RA_COMMIT_DATE"), + ) { + (Some(short_commit_hash), Some(commit_hash), Some(commit_date)) => { + Some(CommitInfo { short_commit_hash, commit_hash, commit_date }) + } + _ => None, + }; + + VersionInfo { version, release_channel, commit_info } +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 4034f244393bf..d037e715e703f 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -16,6 +16,7 @@ doctest = false object.workspace = true libloading.workspace = true memmap2.workspace = true +temp-dir.workspace = true tt.workspace = true syntax-bridge.workspace = true @@ -26,6 +27,7 @@ intern.workspace = true ra-ap-rustc_lexer.workspace = true + [target.'cfg(unix)'.dependencies] libc.workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs index c49159df9916d..c8513a10675da 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs @@ -1,65 +1,66 @@ //! Handles dynamic library loading for proc macro +mod proc_macros; mod version; use proc_macro::bridge; use std::{fmt, fs, io, time::SystemTime}; +use temp_dir::TempDir; use libloading::Library; use object::Object; use paths::{Utf8Path, Utf8PathBuf}; -use crate::{ProcMacroKind, ProcMacroSrvSpan, proc_macros::ProcMacros, server_impl::TopSubtree}; +use crate::{ + PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros, + server_impl::TopSubtree, +}; -/// Loads dynamic library in platform dependent manner. -/// -/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) -/// and [here](https://github.com/rust-lang/rust/issues/60593). -/// -/// Usage of RTLD_DEEPBIND -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) -/// -/// It seems that on Windows that behaviour is default, so we do nothing in that case. 
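
`Expander::new` now takes a `TempDir` owned by the `ProcMacroSrv` and copies the dylib into it, so the whole directory disappears when the server is dropped and the per-file `RemoveFileOnDrop` bookkeeping goes away; on Windows this also keeps the original file unlocked. A sketch of the copy step, assuming the same `temp-dir` crate added in the Cargo.toml hunk:

    // Cargo.toml (illustrative): temp-dir = "0.1"
    use std::{fs, io, path::PathBuf};
    use temp_dir::TempDir;

    // Copy the dylib into a directory we own, so the original stays unlocked
    // and can be rebuilt while the copy is loaded.
    fn copy_for_loading(temp_dir: &TempDir, dylib: &std::path::Path) -> io::Result<PathBuf> {
        let file_name = dylib
            .file_name()
            .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "path has no file name"))?;
        let target = temp_dir.path().join(file_name);
        fs::copy(dylib, &target)?;
        Ok(target)
    }

    fn main() -> io::Result<()> {
        let temp_dir = TempDir::with_prefix("proc-macro-srv").expect("create temp dir");
        // Stand-in for a real proc-macro dylib.
        let original = temp_dir.path().join("original.so");
        fs::write(&original, b"not really a dylib")?;
        let copy = copy_for_loading(&temp_dir, &original)?;
        println!("loaded copy lives at {}", copy.display());
        // `temp_dir` is removed, together with the copy, when it goes out of scope.
        Ok(())
    }
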
-/// -/// # Safety -/// -/// The caller is responsible for ensuring that the path is valid proc-macro library -#[cfg(windows)] -unsafe fn load_library(file: &Utf8Path) -> Result { - // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library - unsafe { Library::new(file) } +pub(crate) struct Expander { + inner: ProcMacroLibrary, + modified_time: SystemTime, } -/// Loads dynamic library in platform dependent manner. -/// -/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) -/// and [here](https://github.com/rust-lang/rust/issues/60593). -/// -/// Usage of RTLD_DEEPBIND -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) -/// -/// It seems that on Windows that behaviour is default, so we do nothing in that case. -/// -/// # Safety -/// -/// The caller is responsible for ensuring that the path is valid proc-macro library -#[cfg(unix)] -unsafe fn load_library(file: &Utf8Path) -> Result { - // not defined by POSIX, different values on mips vs other targets - #[cfg(target_env = "gnu")] - use libc::RTLD_DEEPBIND; - use libloading::os::unix::Library as UnixLibrary; - // defined by POSIX - use libloading::os::unix::RTLD_NOW; +impl Expander { + pub(crate) fn new( + temp_dir: &TempDir, + lib: &Utf8Path, + ) -> Result { + // Some libraries for dynamic loading require canonicalized path even when it is + // already absolute + let lib = lib.canonicalize_utf8()?; + let modified_time = fs::metadata(&lib).and_then(|it| it.modified())?; - // MUSL and bionic don't have it.. - #[cfg(not(target_env = "gnu"))] - const RTLD_DEEPBIND: std::os::raw::c_int = 0x0; + let path = ensure_file_with_lock_free_access(temp_dir, &lib)?; + let library = ProcMacroLibrary::open(path.as_ref())?; - // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library - unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) } + Ok(Expander { inner: library, modified_time }) + } + + pub(crate) fn expand( + &self, + macro_name: &str, + macro_body: TopSubtree, + attributes: Option>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result, PanicMessage> + where + ::TokenStream: Default, + { + self.inner + .proc_macros + .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site) + } + + pub(crate) fn list_macros(&self) -> impl Iterator { + self.inner.proc_macros.list_macros() + } + + pub(crate) fn modified_time(&self) -> SystemTime { + self.modified_time + } } #[derive(Debug)] @@ -133,54 +134,6 @@ impl ProcMacroLibrary { } } -// Drop order matters as we can't remove the dylib before the library is unloaded -pub(crate) struct Expander { - inner: ProcMacroLibrary, - _remove_on_drop: RemoveFileOnDrop, - modified_time: SystemTime, -} - -impl Expander { - pub(crate) fn new(lib: &Utf8Path) -> Result { - // Some libraries for dynamic loading require canonicalized path even when it is - // already absolute - let lib = lib.canonicalize_utf8()?; - let modified_time = fs::metadata(&lib).and_then(|it| it.modified())?; - - let path = ensure_file_with_lock_free_access(&lib)?; - let library = ProcMacroLibrary::open(path.as_ref())?; - - Ok(Expander { inner: library, _remove_on_drop: RemoveFileOnDrop(path), modified_time }) - } - - pub(crate) fn expand( - &self, - macro_name: &str, - macro_body: TopSubtree, - attributes: Option>, - def_site: S, - call_site: S, - mixed_site: S, - ) -> Result, 
String> - where - ::TokenStream: Default, - { - let result = self - .inner - .proc_macros - .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site); - result.map_err(|e| e.into_string().unwrap_or_default()) - } - - pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { - self.inner.proc_macros.list_macros() - } - - pub(crate) fn modified_time(&self) -> SystemTime { - self.modified_time - } -} - fn invalid_data_err(e: impl Into>) -> io::Error { io::Error::new(io::ErrorKind::InvalidData, e) } @@ -210,18 +163,12 @@ fn find_registrar_symbol(obj: &object::File<'_>) -> object::Result io::Result { +fn ensure_file_with_lock_free_access( + temp_dir: &TempDir, + path: &Utf8Path, +) -> io::Result { use std::collections::hash_map::RandomState; use std::hash::{BuildHasher, Hasher}; @@ -229,9 +176,7 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result return Ok(path.to_path_buf()); } - let mut to = Utf8PathBuf::from_path_buf(std::env::temp_dir()).unwrap(); - to.push("rust-analyzer-proc-macros"); - _ = fs::create_dir(&to); + let mut to = Utf8Path::from_path(temp_dir.path()).unwrap().to_owned(); let file_name = path.file_stem().ok_or_else(|| { io::Error::new(io::ErrorKind::InvalidInput, format!("File path is invalid: {path}")) @@ -248,6 +193,60 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result } #[cfg(unix)] -fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result { +fn ensure_file_with_lock_free_access( + _temp_dir: &TempDir, + path: &Utf8Path, +) -> io::Result { Ok(path.to_owned()) } + +/// Loads dynamic library in platform dependent manner. +/// +/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) +/// and [here](https://github.com/rust-lang/rust/issues/60593). +/// +/// Usage of RTLD_DEEPBIND +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) +/// +/// It seems that on Windows that behaviour is default, so we do nothing in that case. +/// +/// # Safety +/// +/// The caller is responsible for ensuring that the path is valid proc-macro library +#[cfg(windows)] +unsafe fn load_library(file: &Utf8Path) -> Result { + // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library + unsafe { Library::new(file) } +} + +/// Loads dynamic library in platform dependent manner. +/// +/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) +/// and [here](https://github.com/rust-lang/rust/issues/60593). +/// +/// Usage of RTLD_DEEPBIND +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) +/// +/// It seems that on Windows that behaviour is default, so we do nothing in that case. +/// +/// # Safety +/// +/// The caller is responsible for ensuring that the path is valid proc-macro library +#[cfg(unix)] +unsafe fn load_library(file: &Utf8Path) -> Result { + // not defined by POSIX, different values on mips vs other targets + #[cfg(target_env = "gnu")] + use libc::RTLD_DEEPBIND; + use libloading::os::unix::Library as UnixLibrary; + // defined by POSIX + use libloading::os::unix::RTLD_NOW; + + // MUSL and bionic don't have it.. 
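
`ensure_file_with_lock_free_access` derives a unique name for the copied dylib from std's `RandomState`, avoiding an extra RNG dependency: every `RandomState` is randomly seeded, so hashing nothing already yields a usable 64-bit value. In isolation:

    use std::collections::hash_map::RandomState;
    use std::hash::{BuildHasher, Hasher};

    // A dependency-free source of "random enough" bits: each `RandomState` is
    // seeded differently, so finishing an empty hasher yields a fresh u64.
    fn unique_suffix() -> u64 {
        RandomState::new().build_hasher().finish()
    }

    fn main() {
        let stem = "proc_macro_test";
        let unique_name = format!("{stem}-{:x}.dylib-copy", unique_suffix());
        println!("{unique_name}");
        // Overwhelmingly likely to differ; not a cryptographic guarantee.
        assert_ne!(unique_suffix(), unique_suffix());
    }
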
+ #[cfg(not(target_env = "gnu"))] + const RTLD_DEEPBIND: std::os::raw::c_int = 0x0; + + // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library + unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) } +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/proc_macros.rs similarity index 81% rename from src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs rename to src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/proc_macros.rs index 18532706c4aaa..9b5721e370ace 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/proc_macros.rs @@ -75,20 +75,13 @@ impl ProcMacros { Err(bridge::PanicMessage::String(format!("proc-macro `{macro_name}` is missing")).into()) } - pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { - self.0 - .iter() - .map(|proc_macro| match proc_macro { - bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { - (trait_name.to_string(), ProcMacroKind::CustomDerive) - } - bridge::client::ProcMacro::Bang { name, .. } => { - (name.to_string(), ProcMacroKind::Bang) - } - bridge::client::ProcMacro::Attr { name, .. } => { - (name.to_string(), ProcMacroKind::Attr) - } - }) - .collect() + pub(crate) fn list_macros(&self) -> impl Iterator { + self.0.iter().map(|proc_macro| match *proc_macro { + bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { + (trait_name, ProcMacroKind::CustomDerive) + } + bridge::client::ProcMacro::Bang { name, .. } => (name, ProcMacroKind::Bang), + bridge::client::ProcMacro::Attr { name, .. } => (name, ProcMacroKind::Attr), + }) } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index 223c5a54b7034..cb97882c58541 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -27,7 +27,6 @@ extern crate ra_ap_rustc_lexer as rustc_lexer; extern crate rustc_lexer; mod dylib; -mod proc_macros; mod server_impl; use std::{ @@ -41,10 +40,13 @@ use std::{ }; use paths::{Utf8Path, Utf8PathBuf}; -use span::{Span, TokenId}; +use span::Span; +use temp_dir::TempDir; use crate::server_impl::TokenStream; +pub use crate::server_impl::token_id::SpanId; + #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum ProcMacroKind { CustomDerive, @@ -57,11 +59,16 @@ pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION"); pub struct ProcMacroSrv<'env> { expanders: Mutex>>, env: &'env EnvSnapshot, + temp_dir: TempDir, } impl<'env> ProcMacroSrv<'env> { pub fn new(env: &'env EnvSnapshot) -> Self { - Self { expanders: Default::default(), env } + Self { + expanders: Default::default(), + env, + temp_dir: TempDir::with_prefix("proc-macro-srv").unwrap(), + } } } @@ -71,18 +78,19 @@ impl ProcMacroSrv<'_> { pub fn expand( &self, lib: impl AsRef, - env: Vec<(String, String)>, + env: &[(String, String)], current_dir: Option>, - macro_name: String, + macro_name: &str, macro_body: tt::TopSubtree, attribute: Option>, def_site: S, call_site: S, mixed_site: S, - ) -> Result>, String> { + ) -> Result>, PanicMessage> { let snapped_env = self.env; - let expander = - self.expander(lib.as_ref()).map_err(|err| format!("failed to load macro: {err}"))?; + let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage { + message: Some(format!("failed to load 
macro: {err}")), + })?; let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref)); @@ -91,11 +99,11 @@ impl ProcMacroSrv<'_> { let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) - .name(macro_name.clone()) + .name(macro_name.to_owned()) .spawn_scoped(s, move || { expander .expand( - ¯o_name, + macro_name, server_impl::TopSubtree(macro_body.0.into_vec()), attribute.map(|it| server_impl::TopSubtree(it.0.into_vec())), def_site, @@ -104,12 +112,7 @@ impl ProcMacroSrv<'_> { ) .map(|tt| tt.0) }); - let res = match thread { - Ok(handle) => handle.join(), - Err(e) => return Err(e.to_string()), - }; - - match res { + match thread.unwrap().join() { Ok(res) => res, Err(e) => std::panic::resume_unwind(e), } @@ -124,12 +127,12 @@ impl ProcMacroSrv<'_> { dylib_path: &Utf8Path, ) -> Result, String> { let expander = self.expander(dylib_path)?; - Ok(expander.list_macros()) + Ok(expander.list_macros().map(|(k, v)| (k.to_owned(), v)).collect()) } fn expander(&self, path: &Utf8Path) -> Result, String> { let expander = || { - let expander = dylib::Expander::new(path) + let expander = dylib::Expander::new(&self.temp_dir, path) .map_err(|err| format!("Cannot create expander for {path}: {err}",)); expander.map(Arc::new) }; @@ -159,8 +162,8 @@ pub trait ProcMacroSrvSpan: Copy + Send { fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; } -impl ProcMacroSrvSpan for TokenId { - type Server = server_impl::token_id::TokenIdServer; +impl ProcMacroSrvSpan for SpanId { + type Server = server_impl::token_id::SpanIdServer; fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { Self::Server { call_site, def_site, mixed_site } @@ -178,6 +181,8 @@ impl ProcMacroSrvSpan for Span { } } } + +#[derive(Debug, Clone)] pub struct PanicMessage { message: Option, } @@ -201,7 +206,7 @@ impl Default for EnvSnapshot { static ENV_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); struct EnvChange<'snap> { - changed_vars: Vec, + changed_vars: Vec<&'snap str>, prev_working_dir: Option, snap: &'snap EnvSnapshot, _guard: std::sync::MutexGuard<'snap, ()>, @@ -210,7 +215,7 @@ struct EnvChange<'snap> { impl<'snap> EnvChange<'snap> { fn apply( snap: &'snap EnvSnapshot, - new_vars: Vec<(String, String)>, + new_vars: &'snap [(String, String)], current_dir: Option<&Path>, ) -> EnvChange<'snap> { let guard = ENV_LOCK.lock().unwrap_or_else(std::sync::PoisonError::into_inner); @@ -230,11 +235,11 @@ impl<'snap> EnvChange<'snap> { EnvChange { snap, changed_vars: new_vars - .into_iter() + .iter() .map(|(k, v)| { // SAFETY: We have acquired the environment lock - unsafe { env::set_var(&k, v) }; - k + unsafe { env::set_var(k, v) }; + &**k }) .collect(), prev_working_dir, @@ -257,14 +262,14 @@ impl Drop for EnvChange<'_> { } } - if let Some(dir) = &self.prev_working_dir { - if let Err(err) = std::env::set_current_dir(dir) { - eprintln!( - "Failed to set the current working dir to {}. Error: {:?}", - dir.display(), - err - ) - } + if let Some(dir) = &self.prev_working_dir + && let Err(err) = std::env::set_current_dir(dir) + { + eprintln!( + "Failed to set the current working dir to {}. 
Error: {:?}", + dir.display(), + err + ) } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs index 662f6257642f0..32ad32731ba6c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs @@ -209,7 +209,7 @@ pub(super) fn from_token_tree( token_trees.push(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { spacing: tt::Spacing::Alone, span: literal.span, - char: '-' as char, + char: '-', }))); symbol = Symbol::intern(&symbol.as_str()[1..]); } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs index b493b325e830f..91e70ea243ae4 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs @@ -1,4 +1,4 @@ -//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span. +//! proc-macro server backend based on [`proc_macro_api::msg::SpanId`] as the backing span. //! This backend is rather inflexible, used by RustRover and older rust-analyzer versions. use std::ops::{Bound, Range}; @@ -7,25 +7,34 @@ use proc_macro::bridge::{self, server}; use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder}; -type Span = span::TokenId; +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct SpanId(pub u32); + +impl std::fmt::Debug for SpanId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +type Span = SpanId; type TokenStream = crate::server_impl::TokenStream; pub struct FreeFunctions; -pub struct TokenIdServer { +pub struct SpanIdServer { pub call_site: Span, pub def_site: Span, pub mixed_site: Span, } -impl server::Types for TokenIdServer { +impl server::Types for SpanIdServer { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; type Span = Span; type Symbol = Symbol; } -impl server::FreeFunctions for TokenIdServer { +impl server::FreeFunctions for SpanIdServer { fn injected_env_var(&mut self, _: &str) -> Option { None } @@ -41,7 +50,7 @@ impl server::FreeFunctions for TokenIdServer { fn emit_diagnostic(&mut self, _: bridge::Diagnostic) {} } -impl server::TokenStream for TokenIdServer { +impl server::TokenStream for SpanIdServer { fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { stream.is_empty() } @@ -102,12 +111,12 @@ impl server::TokenStream for TokenIdServer { &mut self, stream: Self::TokenStream, ) -> Vec> { - // Can't join with `TokenId`. + // Can't join with `SpanId`. 
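
`EnvChange` mutates the process environment for the duration of an expansion and undoes everything in `Drop`, holding a global lock because the environment is process-wide state. A simplified guard with the same restore-on-drop behavior (no lock here, so it assumes a single-threaded caller; `set_var`/`remove_var` are `unsafe fn` from the 2024 edition on, and the `unsafe` blocks are merely redundant on older editions):

    use std::env;

    // Restores the previous value (or absence) of each variable on drop.
    struct EnvGuard {
        saved: Vec<(String, Option<String>)>,
    }

    impl EnvGuard {
        fn apply(vars: &[(String, String)]) -> EnvGuard {
            let saved = vars
                .iter()
                .map(|(k, v)| {
                    let prev = env::var(k).ok();
                    // SAFETY: no other thread reads or writes the environment here.
                    unsafe { env::set_var(k, v) };
                    (k.clone(), prev)
                })
                .collect();
            EnvGuard { saved }
        }
    }

    impl Drop for EnvGuard {
        fn drop(&mut self) {
            for (k, prev) in self.saved.drain(..) {
                // SAFETY: same single-threaded assumption as in `apply`.
                match prev {
                    Some(v) => unsafe { env::set_var(&k, v) },
                    None => unsafe { env::remove_var(&k) },
                }
            }
        }
    }

    fn main() {
        let vars = vec![("PROC_MACRO_SRV_DEMO".to_owned(), "1".to_owned())];
        {
            let _guard = EnvGuard::apply(&vars);
            assert_eq!(env::var("PROC_MACRO_SRV_DEMO").as_deref(), Ok("1"));
        }
        assert!(env::var("PROC_MACRO_SRV_DEMO").is_err());
    }
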
stream.into_bridge(&mut |first, _second| first) } } -impl server::Span for TokenIdServer { +impl server::Span for SpanIdServer { fn debug(&mut self, span: Self::Span) -> String { format!("{:?}", span.0) } @@ -174,14 +183,14 @@ impl server::Span for TokenIdServer { } } -impl server::Symbol for TokenIdServer { +impl server::Symbol for SpanIdServer { fn normalize_and_validate_ident(&mut self, string: &str) -> Result { // FIXME: nfc-normalize and validate idents Ok(::intern_symbol(string)) } } -impl server::Server for TokenIdServer { +impl server::Server for SpanIdServer { fn globals(&mut self) -> bridge::ExpnGlobals { bridge::ExpnGlobals { def_site: self.def_site, @@ -201,8 +210,6 @@ impl server::Server for TokenIdServer { #[cfg(test)] mod tests { - use span::TokenId; - use super::*; #[test] @@ -211,18 +218,18 @@ mod tests { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym: Symbol::intern("struct"), - span: TokenId(0), + span: SpanId(0), is_raw: tt::IdentIsRaw::No, })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym: Symbol::intern("T"), - span: TokenId(0), + span: SpanId(0), is_raw: tt::IdentIsRaw::No, })), tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: TokenId(0), - close: TokenId(0), + open: SpanId(0), + close: SpanId(0), kind: tt::DelimiterKind::Brace, }, len: 0, @@ -238,8 +245,8 @@ mod tests { let subtree_paren_a = vec![ tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: TokenId(0), - close: TokenId(0), + open: SpanId(0), + close: SpanId(0), kind: tt::DelimiterKind::Parenthesis, }, len: 1, @@ -247,24 +254,24 @@ mod tests { tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { is_raw: tt::IdentIsRaw::No, sym: Symbol::intern("a"), - span: TokenId(0), + span: SpanId(0), })), ]; - let t1 = TokenStream::from_str("(a)", TokenId(0)).unwrap(); + let t1 = TokenStream::from_str("(a)", SpanId(0)).unwrap(); assert_eq!(t1.token_trees.len(), 2); assert!(t1.token_trees[0..2] == subtree_paren_a); - let t2 = TokenStream::from_str("(a);", TokenId(0)).unwrap(); + let t2 = TokenStream::from_str("(a);", SpanId(0)).unwrap(); assert_eq!(t2.token_trees.len(), 3); assert!(t2.token_trees[0..2] == subtree_paren_a); - let underscore = TokenStream::from_str("_", TokenId(0)).unwrap(); + let underscore = TokenStream::from_str("_", SpanId(0)).unwrap(); assert!( underscore.token_trees[0] == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym: Symbol::intern("_"), - span: TokenId(0), + span: SpanId(0), is_raw: tt::IdentIsRaw::No, })) ); diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 10af5662b5c05..f5a76e30bbcba 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -1,14 +1,12 @@ //! 
utils used in proc-macro tests use expect_test::Expect; -use span::{ - EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TokenId, -}; +use span::{EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext}; use tt::TextRange; -use crate::{EnvSnapshot, ProcMacroSrv, dylib, proc_macro_test_dylib_path}; +use crate::{EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path}; -fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream { +fn parse_string(call_site: SpanId, src: &str) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree( syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src) .unwrap() @@ -57,11 +55,11 @@ fn assert_expand_impl( expect_spanned: Expect, ) { let path = proc_macro_test_dylib_path(); - let expander = dylib::Expander::new(&path).unwrap(); + let expander = dylib::Expander::new(&temp_dir::TempDir::new().unwrap(), &path).unwrap(); - let def_site = TokenId(0); - let call_site = TokenId(1); - let mixed_site = TokenId(2); + let def_site = SpanId(0); + let call_site = SpanId(1); + let mixed_site = SpanId(2); let input_ts = parse_string(call_site, input).into_subtree(call_site); let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site)); let input_ts_string = format!("{input_ts:?}"); diff --git a/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs b/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs index ec4b6b2a4ac3c..277cc0b269d71 100644 --- a/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs +++ b/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs @@ -278,15 +278,15 @@ pub(crate) fn query_group_impl( return Err(syn::Error::new(signature.span(), "Queries must have a return type")); }; - if let syn::Type::Path(ref ty_path) = *return_ty { - if matches!(query_kind, QueryKind::Input) { - let field = InputStructField { - name: method_name.to_token_stream(), - ty: ty_path.path.to_token_stream(), - }; - - input_struct_fields.push(field); - } + if let syn::Type::Path(ref ty_path) = *return_ty + && matches!(query_kind, QueryKind::Input) + { + let field = InputStructField { + name: method_name.to_token_stream(), + ty: ty_path.path.to_token_stream(), + }; + + input_struct_fields.push(field); } if let Some(block) = &mut method.default { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 4dba97c8ec492..ab045e0bf9ff1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -122,12 +122,12 @@ fn setup_logging(log_file_flag: Option) -> anyhow::Result<()> { // directory which we set to the project workspace. // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/general-environment-variables // https://docs.microsoft.com/en-us/windows/win32/api/dbghelp/nf-dbghelp-syminitialize - if let Ok(path) = env::current_exe() { - if let Some(path) = path.parent() { - // SAFETY: This is safe because this is single-threaded. - unsafe { - env::set_var("_NT_SYMBOL_PATH", path); - } + if let Ok(path) = env::current_exe() + && let Some(path) = path.parent() + { + // SAFETY: This is safe because this is single-threaded. 
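
A large share of this patch is mechanical: nested `if let` blocks collapsed into let-chains, which keep the success path at one indentation level. Let-chains need edition 2024 (stabilized in Rust 1.88); a before/after sketch modeled on the `current_exe` case above:

    fn exe_dir_nested() -> Option<std::path::PathBuf> {
        // Before: nested `if let`s, with the body two levels deep.
        if let Ok(path) = std::env::current_exe() {
            if let Some(dir) = path.parent() {
                return Some(dir.to_path_buf());
            }
        }
        None
    }

    fn exe_dir_chained() -> Option<std::path::PathBuf> {
        // After: a let-chain keeps the happy path flat.
        if let Ok(path) = std::env::current_exe()
            && let Some(dir) = path.parent()
        {
            return Some(dir.to_path_buf());
        }
        None
    }

    fn main() {
        assert_eq!(exe_dir_nested(), exe_dir_chained());
        println!("{:?}", exe_dir_chained());
    }
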
+ unsafe { + env::set_var("_NT_SYMBOL_PATH", path); } } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 4f75d14834c64..97886844a9f9e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -136,34 +136,30 @@ impl flags::AnalysisStats { for source_root_id in source_roots { let source_root = db.source_root(source_root_id).source_root(db); for file_id in source_root.iter() { - if let Some(p) = source_root.path_for_file(&file_id) { - if let Some((_, Some("rs"))) = p.name_and_extension() { - // measure workspace/project code - if !source_root.is_library || self.with_deps { - let length = db.file_text(file_id).text(db).lines().count(); - let item_stats = db - .file_item_tree( - EditionedFileId::current_edition(db, file_id).into(), - ) - .item_tree_stats() - .into(); - - workspace_loc += length; - workspace_item_trees += 1; - workspace_item_stats += item_stats; - } else { - let length = db.file_text(file_id).text(db).lines().count(); - let item_stats = db - .file_item_tree( - EditionedFileId::current_edition(db, file_id).into(), - ) - .item_tree_stats() - .into(); - - dep_loc += length; - dep_item_trees += 1; - dep_item_stats += item_stats; - } + if let Some(p) = source_root.path_for_file(&file_id) + && let Some((_, Some("rs"))) = p.name_and_extension() + { + // measure workspace/project code + if !source_root.is_library || self.with_deps { + let length = db.file_text(file_id).text(db).lines().count(); + let item_stats = db + .file_item_tree(EditionedFileId::current_edition(db, file_id).into()) + .item_tree_stats() + .into(); + + workspace_loc += length; + workspace_item_trees += 1; + workspace_item_stats += item_stats; + } else { + let length = db.file_text(file_id).text(db).lines().count(); + let item_stats = db + .file_item_tree(EditionedFileId::current_edition(db, file_id).into()) + .item_tree_stats() + .into(); + + dep_loc += length; + dep_item_trees += 1; + dep_item_stats += item_stats; } } } @@ -560,29 +556,35 @@ impl flags::AnalysisStats { std::fs::write(path, txt).unwrap(); let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap(); - if let Some(err) = res.error() { - if err.contains("error: could not compile") { - if let Some(mut err_idx) = err.find("error[E") { - err_idx += 7; - let err_code = &err[err_idx..err_idx + 4]; - match err_code { - "0282" | "0283" => continue, // Byproduct of testing method - "0277" | "0308" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882 - // FIXME: In some rare cases `AssocItem::container_or_implemented_trait` returns `None` for trait methods. - // Generated code is valid in case traits are imported - "0599" if err.contains("the following trait is implemented but not in scope") => continue, - _ => (), + if let Some(err) = res.error() + && err.contains("error: could not compile") + { + if let Some(mut err_idx) = err.find("error[E") { + err_idx += 7; + let err_code = &err[err_idx..err_idx + 4]; + match err_code { + "0282" | "0283" => continue, // Byproduct of testing method + "0277" | "0308" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882 + // FIXME: In some rare cases `AssocItem::container_or_implemented_trait` returns `None` for trait methods. 
+ // Generated code is valid in case traits are imported + "0599" + if err.contains( + "the following trait is implemented but not in scope", + ) => + { + continue; } - bar.println(err); - bar.println(generated); - acc.error_codes - .entry(err_code.to_owned()) - .and_modify(|n| *n += 1) - .or_insert(1); - } else { - acc.syntax_errors += 1; - bar.println(format!("Syntax error: \n{err}")); + _ => (), } + bar.println(err); + bar.println(generated); + acc.error_codes + .entry(err_code.to_owned()) + .and_modify(|n| *n += 1) + .or_insert(1); + } else { + acc.syntax_errors += 1; + bar.println(format!("Syntax error: \n{err}")); } } } @@ -731,12 +733,11 @@ impl flags::AnalysisStats { let name = body_id.name(db).unwrap_or_else(Name::missing); let module = body_id.module(db); let display_target = module.krate().to_display_target(db); - if let Some(only_name) = self.only.as_deref() { - if name.display(db, Edition::LATEST).to_string() != only_name - && full_name(db, body_id, module) != only_name - { - continue; - } + if let Some(only_name) = self.only.as_deref() + && name.display(db, Edition::LATEST).to_string() != only_name + && full_name(db, body_id, module) != only_name + { + continue; } let msg = move || { if verbosity.is_verbose() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs index 1b9b870a7c74c..028311388c561 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs @@ -83,11 +83,11 @@ impl<'a> ProgressReport<'a> { output.extend(text.chars().skip(common_prefix_length)); // If the new text is shorter than the old one: delete overlapping characters - if let Some(overlap_count) = self.text.len().checked_sub(text.len()) { - if overlap_count > 0 { - output += &" ".repeat(overlap_count); - output += &"\x08".repeat(overlap_count); - } + if let Some(overlap_count) = self.text.len().checked_sub(text.len()) + && overlap_count > 0 + { + output += &" ".repeat(overlap_count); + output += &"\x08".repeat(overlap_count); } let _ = io::stdout().write(output.as_bytes()); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 30ac93fb6f838..36ae98b321b84 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -305,10 +305,10 @@ impl flags::RustcTests { for i in walk_dir { let i = i?; let p = i.into_path(); - if let Some(f) = &self.filter { - if !p.as_os_str().to_string_lossy().contains(f) { - continue; - } + if let Some(f) = &self.filter + && !p.as_os_str().to_string_lossy().contains(f) + { + continue; } if p.extension().is_none_or(|x| x != "rs") { continue; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 9456fd8809b42..1a00295b9ac18 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -61,233 +61,312 @@ pub enum MaxSubstitutionLength { Limit(usize), } -// Defines the server-side configuration of the rust-analyzer. We generate -// *parts* of VS Code's `package.json` config from this. Run `cargo test` to -// re-generate that file. +// Defines the server-side configuration of the rust-analyzer. 
We generate *parts* of VS Code's +// `package.json` config from this. Run `cargo test` to re-generate that file. // -// However, editor specific config, which the server doesn't know about, should -// be specified directly in `package.json`. +// However, editor specific config, which the server doesn't know about, should be specified +// directly in `package.json`. // -// To deprecate an option by replacing it with another name use `new_name` | `old_name` so that we keep -// parsing the old name. +// To deprecate an option by replacing it with another name use `new_name` | `old_name` so that we +// keep parsing the old name. config_data! { - /// Configs that apply on a workspace-wide scope. There are 2 levels on which a global configuration can be configured + /// Configs that apply on a workspace-wide scope. There are 2 levels on which a global + /// configuration can be configured /// - /// 1. `rust-analyzer.toml` file under user's config directory (e.g ~/.config/rust-analyzer/rust-analyzer.toml) + /// 1. `rust-analyzer.toml` file under user's config directory (e.g + /// ~/.config/rust-analyzer/rust-analyzer.toml) /// 2. Client's own configurations (e.g `settings.json` on VS Code) /// - /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle. + /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen + /// by the nearest first principle. global: struct GlobalDefaultConfigData <- GlobalConfigInput -> { /// Warm up caches on project load. cachePriming_enable: bool = true, - /// How many worker threads to handle priming caches. The default `0` means to pick automatically. + + /// How many worker threads to handle priming caches. The default `0` means to pick + /// automatically. cachePriming_numThreads: NumThreads = NumThreads::Physical, /// Custom completion snippets. - completion_snippets_custom: FxIndexMap = Config::completion_snippets_default(), - + completion_snippets_custom: FxIndexMap = + Config::completion_snippets_default(), - /// These paths (file/directories) will be ignored by rust-analyzer. They are - /// relative to the workspace root, and globs are not supported. You may - /// also need to add the folders to Code's `files.watcherExclude`. + /// List of files to ignore + /// + /// These paths (file/directories) will be ignored by rust-analyzer. They are relative to + /// the workspace root, and globs are not supported. You may also need to add the folders to + /// Code's `files.watcherExclude`. files_exclude | files_excludeDirs: Vec = vec![], - - - /// Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`). + /// Highlight related return values while the cursor is on any `match`, `if`, or match arm + /// arrow (`=>`). highlightRelated_branchExitPoints_enable: bool = true, - /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords. + + /// Highlight related references while the cursor is on `break`, `loop`, `while`, or `for` + /// keywords. highlightRelated_breakPoints_enable: bool = true, - /// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure. + + /// Highlight all captures of a closure while the cursor is on the `|` or move keyword of a closure. 
highlightRelated_closureCaptures_enable: bool = true, - /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`). + + /// Highlight all exit points while the cursor is on any `return`, `?`, `fn`, or return type + /// arrow (`->`). highlightRelated_exitPoints_enable: bool = true, - /// Enables highlighting of related references while the cursor is on any identifier. + + /// Highlight related references while the cursor is on any identifier. highlightRelated_references_enable: bool = true, - /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords. + + /// Highlight all break points for a loop or block context while the cursor is on any + /// `async` or `await` keywords. highlightRelated_yieldPoints_enable: bool = true, - /// Whether to show `Debug` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_debug_enable: bool = true, - /// Whether to show HoverActions in Rust files. - hover_actions_enable: bool = true, - /// Whether to show `Go to Type Definition` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_gotoTypeDef_enable: bool = true, - /// Whether to show `Implementations` action. Only applies when + /// Show `Debug` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_debug_enable: bool = true, + + /// Show HoverActions in Rust files. + hover_actions_enable: bool = true, + + /// Show `Go to Type Definition` action. Only applies when /// `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_gotoTypeDef_enable: bool = true, + + /// Show `Implementations` action. Only applies when `#rust-analyzer.hover.actions.enable#` + /// is set. hover_actions_implementations_enable: bool = true, - /// Whether to show `References` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_references_enable: bool = false, - /// Whether to show `Run` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_run_enable: bool = true, - /// Whether to show `Update Test` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set. - hover_actions_updateTest_enable: bool = true, - - /// Whether to show documentation on hover. - hover_documentation_enable: bool = true, - /// Whether to show keyword hover popups. Only applies when + + /// Show `References` action. Only applies when `#rust-analyzer.hover.actions.enable#` is + /// set. + hover_actions_references_enable: bool = false, + + /// Show `Run` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_run_enable: bool = true, + + /// Show `Update Test` action. Only applies when `#rust-analyzer.hover.actions.enable#` and + /// `#rust-analyzer.hover.actions.run.enable#` are set. + hover_actions_updateTest_enable: bool = true, + + /// Show documentation on hover. + hover_documentation_enable: bool = true, + + /// Show keyword hover popups. Only applies when /// `#rust-analyzer.hover.documentation.enable#` is set. - hover_documentation_keywords_enable: bool = true, - /// Whether to show drop glue information on hover. - hover_dropGlue_enable: bool = true, + hover_documentation_keywords_enable: bool = true, + + /// Show drop glue information on hover. 
+ hover_dropGlue_enable: bool = true, + /// Use markdown syntax for links on hover. hover_links_enable: bool = true, - /// Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis. + + /// Show what types are used as generic arguments in calls etc. on hover, and limit the max + /// length to show such types, beyond which they will be shown with ellipsis. /// - /// This can take three values: `null` means "unlimited", the string `"hide"` means to not show generic substitutions at all, and a number means to limit them to X characters. + /// This can take three values: `null` means "unlimited", the string `"hide"` means to not + /// show generic substitutions at all, and a number means to limit them to X characters. /// /// The default is 20 characters. - hover_maxSubstitutionLength: Option = Some(MaxSubstitutionLength::Limit(20)), + hover_maxSubstitutionLength: Option = + Some(MaxSubstitutionLength::Limit(20)), + /// How to render the align information in a memory layout hover. - hover_memoryLayout_alignment: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), - /// Whether to show memory layout data on hover. + hover_memoryLayout_alignment: Option = + Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + + /// Show memory layout data on hover. hover_memoryLayout_enable: bool = true, + /// How to render the niche information in a memory layout hover. hover_memoryLayout_niches: Option = Some(false), + /// How to render the offset information in a memory layout hover. - hover_memoryLayout_offset: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + hover_memoryLayout_offset: Option = + Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + /// How to render the padding information in a memory layout hover. hover_memoryLayout_padding: Option = None, + /// How to render the size information in a memory layout hover. - hover_memoryLayout_size: Option = Some(MemoryLayoutHoverRenderKindDef::Both), + hover_memoryLayout_size: Option = + Some(MemoryLayoutHoverRenderKindDef::Both), /// How many variants of an enum to display when hovering on. Show none if empty. hover_show_enumVariants: Option = Some(5), - /// How many fields of a struct, variant or union to display when hovering on. Show none if empty. + + /// How many fields of a struct, variant or union to display when hovering on. Show none if + /// empty. hover_show_fields: Option = Some(5), + /// How many associated items of a trait to display when hovering a trait. hover_show_traitAssocItems: Option = None, - /// Whether to show inlay type hints for binding modes. - inlayHints_bindingModeHints_enable: bool = false, - /// Whether to show inlay type hints for method chains. - inlayHints_chainingHints_enable: bool = true, - /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to. - inlayHints_closingBraceHints_enable: bool = true, + /// Show inlay type hints for binding modes. + inlayHints_bindingModeHints_enable: bool = false, + + /// Show inlay type hints for method chains. + inlayHints_chainingHints_enable: bool = true, + + /// Show inlay hints after a closing `}` to indicate what item it belongs to. + inlayHints_closingBraceHints_enable: bool = true, + /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1 /// to always show them). - inlayHints_closingBraceHints_minLines: usize = 25, - /// Whether to show inlay hints for closure captures. 
- inlayHints_closureCaptureHints_enable: bool = false, - /// Whether to show inlay type hints for return types of closures. - inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = ClosureReturnTypeHintsDef::Never, + inlayHints_closingBraceHints_minLines: usize = 25, + + /// Show inlay hints for closure captures. + inlayHints_closureCaptureHints_enable: bool = false, + + /// Show inlay type hints for return types of closures. + inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = + ClosureReturnTypeHintsDef::Never, + /// Closure notation in type and chaining inlay hints. - inlayHints_closureStyle: ClosureStyle = ClosureStyle::ImplFn, - /// Whether to show enum variant discriminant hints. - inlayHints_discriminantHints_enable: DiscriminantHintsDef = DiscriminantHintsDef::Never, - /// Whether to show inlay hints for type adjustments. - inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = AdjustmentHintsDef::Never, - /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. + inlayHints_closureStyle: ClosureStyle = ClosureStyle::ImplFn, + + /// Show enum variant discriminant hints. + inlayHints_discriminantHints_enable: DiscriminantHintsDef = + DiscriminantHintsDef::Never, + + /// Show inlay hints for type adjustments. + inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = + AdjustmentHintsDef::Never, + + /// Hide inlay hints for type adjustments outside of `unsafe` blocks. inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = false, - /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). - inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = AdjustmentHintsModeDef::Prefix, - /// Whether to show const generic parameter name inlay hints. - inlayHints_genericParameterHints_const_enable: bool= true, - /// Whether to show generic lifetime parameter name inlay hints. + + /// Show inlay hints as postfix ops (`.*` instead of `*`, etc). + inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = + AdjustmentHintsModeDef::Prefix, + + /// Show const generic parameter name inlay hints. + inlayHints_genericParameterHints_const_enable: bool = true, + + /// Show generic lifetime parameter name inlay hints. inlayHints_genericParameterHints_lifetime_enable: bool = false, - /// Whether to show generic type parameter name inlay hints. + + /// Show generic type parameter name inlay hints. inlayHints_genericParameterHints_type_enable: bool = false, - /// Whether to show implicit drop hints. - inlayHints_implicitDrops_enable: bool = false, - /// Whether to show inlay hints for the implied type parameter `Sized` bound. - inlayHints_implicitSizedBoundHints_enable: bool = false, - /// Whether to show inlay type hints for elided lifetimes in function signatures. + + /// Show implicit drop hints. + inlayHints_implicitDrops_enable: bool = false, + + /// Show inlay hints for the implied type parameter `Sized` bound. + inlayHints_implicitSizedBoundHints_enable: bool = false, + + /// Show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = LifetimeElisionDef::Never, - /// Whether to prefer using parameter names as the name for elided lifetime hints if possible. - inlayHints_lifetimeElisionHints_useParameterNames: bool = false, + + /// Prefer using parameter names as the name for elided lifetime hints if possible. + inlayHints_lifetimeElisionHints_useParameterNames: bool = false, + /// Maximum length for inlay hints. 
Set to null to have an unlimited length. - inlayHints_maxLength: Option = Some(25), - /// Whether to show function parameter name inlay hints at the call - /// site. - inlayHints_parameterHints_enable: bool = true, - /// Whether to show exclusive range inlay hints. - inlayHints_rangeExclusiveHints_enable: bool = false, - /// Whether to show inlay hints for compiler inserted reborrows. - /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. - inlayHints_reborrowHints_enable: ReborrowHintsDef = ReborrowHintsDef::Never, + inlayHints_maxLength: Option = Some(25), + + /// Show function parameter name inlay hints at the call site. + inlayHints_parameterHints_enable: bool = true, + + /// Show exclusive range inlay hints. + inlayHints_rangeExclusiveHints_enable: bool = false, + + /// Show inlay hints for compiler inserted reborrows. + /// + /// This setting is deprecated in favor of + /// #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. + inlayHints_reborrowHints_enable: ReborrowHintsDef = ReborrowHintsDef::Never, + /// Whether to render leading colons for type hints, and trailing colons for parameter hints. - inlayHints_renderColons: bool = true, - /// Whether to show inlay type hints for variables. - inlayHints_typeHints_enable: bool = true, - /// Whether to hide inlay type hints for `let` statements that initialize to a closure. - /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. - inlayHints_typeHints_hideClosureInitialization: bool = false, - /// Whether to hide inlay parameter type hints for closures. - inlayHints_typeHints_hideClosureParameter:bool = false, - /// Whether to hide inlay type hints for constructors. - inlayHints_typeHints_hideNamedConstructor: bool = false, - - /// Enables the experimental support for interpreting tests. + inlayHints_renderColons: bool = true, + + /// Show inlay type hints for variables. + inlayHints_typeHints_enable: bool = true, + + /// Hide inlay type hints for `let` statements that initialize to a closure. + /// + /// Only applies to closures with blocks, same as + /// `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. + inlayHints_typeHints_hideClosureInitialization: bool = false, + + /// Hide inlay parameter type hints for closures. + inlayHints_typeHints_hideClosureParameter: bool = false, + + /// Hide inlay type hints for constructors. + inlayHints_typeHints_hideNamedConstructor: bool = false, + + /// Enable the experimental support for interpreting tests. interpret_tests: bool = false, /// Join lines merges consecutive declaration and initialization of an assignment. joinLines_joinAssignments: bool = true, + /// Join lines inserts else between consecutive ifs. joinLines_joinElseIf: bool = true, + /// Join lines removes trailing commas. joinLines_removeTrailingComma: bool = true, + /// Join lines unwraps trivial blocks. joinLines_unwrapTrivialBlock: bool = true, - /// Whether to show `Debug` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_debug_enable: bool = true, - /// Whether to show CodeLens in Rust files. - lens_enable: bool = true, - /// Whether to show `Implementations` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_implementations_enable: bool = true, + /// Show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set. + lens_debug_enable: bool = true, + + /// Show CodeLens in Rust files. 
+ lens_enable: bool = true, + + /// Show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set. + lens_implementations_enable: bool = true, + /// Where to render annotations. lens_location: AnnotationLocation = AnnotationLocation::AboveName, - /// Whether to show `References` lens for Struct, Enum, and Union. - /// Only applies when `#rust-analyzer.lens.enable#` is set. + + /// Show `References` lens for Struct, Enum, and Union. Only applies when + /// `#rust-analyzer.lens.enable#` is set. lens_references_adt_enable: bool = false, - /// Whether to show `References` lens for Enum Variants. - /// Only applies when `#rust-analyzer.lens.enable#` is set. - lens_references_enumVariant_enable: bool = false, - /// Whether to show `Method References` lens. Only applies when + + /// Show `References` lens for Enum Variants. Only applies when /// `#rust-analyzer.lens.enable#` is set. + lens_references_enumVariant_enable: bool = false, + + /// Show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set. lens_references_method_enable: bool = false, - /// Whether to show `References` lens for Trait. - /// Only applies when `#rust-analyzer.lens.enable#` is set. + + /// Show `References` lens for Trait. Only applies when `#rust-analyzer.lens.enable#` is + /// set. lens_references_trait_enable: bool = false, - /// Whether to show `Run` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_run_enable: bool = true, - /// Whether to show `Update Test` lens. Only applies when - /// `#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set. + + /// Show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set. + lens_run_enable: bool = true, + + /// Show `Update Test` lens. Only applies when `#rust-analyzer.lens.enable#` and + /// `#rust-analyzer.lens.run.enable#` are set. lens_updateTest_enable: bool = true, - /// Disable project auto-discovery in favor of explicitly specified set - /// of projects. + /// Disable project auto-discovery in favor of explicitly specified set of projects. /// - /// Elements must be paths pointing to `Cargo.toml`, - /// `rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON - /// objects in `rust-project.json` format. + /// Elements must be paths pointing to `Cargo.toml`, `rust-project.json`, `.rs` files (which + /// will be treated as standalone files) or JSON objects in `rust-project.json` format. linkedProjects: Vec = vec![], /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128. - lru_capacity: Option = None, - /// Sets the LRU capacity of the specified queries. + lru_capacity: Option = None, + + /// The LRU capacity of the specified queries. lru_query_capacities: FxHashMap, u16> = FxHashMap::default(), - /// Whether to show `can't find Cargo.toml` error message. - notifications_cargoTomlNotFound: bool = true, + /// Show `can't find Cargo.toml` error message. + notifications_cargoTomlNotFound: bool = true, - /// How many worker threads in the main loop. The default `null` means to pick automatically. + /// The number of worker threads in the main loop. The default `null` means to pick + /// automatically. numThreads: Option = None, /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set. procMacro_attributes_enable: bool = true, + /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`. 
- procMacro_enable: bool = true, + procMacro_enable: bool = true, + /// Internal config, path to proc-macro server executable. - procMacro_server: Option = None, + procMacro_server: Option = None, /// Exclude imports from find-all-references. references_excludeImports: bool = false, @@ -300,31 +379,41 @@ config_data! { /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra /// doc links. semanticHighlighting_doc_comment_inject_enable: bool = true, - /// Whether the server is allowed to emit non-standard tokens and modifiers. + + /// Emit non-standard tokens and modifiers + /// + /// When enabled, rust-analyzer will emit tokens and modifiers that are not part of the + /// standard set of semantic tokens. semanticHighlighting_nonStandardTokens: bool = true, + /// Use semantic tokens for operators. /// /// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when /// they are tagged with modifiers. semanticHighlighting_operator_enable: bool = true, + /// Use specialized semantic tokens for operators. /// /// When enabled, rust-analyzer will emit special token types for operator tokens instead /// of the generic `operator` token type. semanticHighlighting_operator_specialization_enable: bool = false, + /// Use semantic tokens for punctuation. /// /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when /// they are tagged with modifiers or have a special role. semanticHighlighting_punctuation_enable: bool = false, + /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro /// calls. semanticHighlighting_punctuation_separate_macro_bang: bool = false, + /// Use specialized semantic tokens for punctuation. /// /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead /// of the generic `punctuation` token type. semanticHighlighting_punctuation_specialization_enable: bool = false, + /// Use semantic tokens for strings. /// /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars. @@ -333,16 +422,21 @@ config_data! { semanticHighlighting_strings_enable: bool = true, /// Show full signature of the callable. Only shows parameters if disabled. - signatureInfo_detail: SignatureDetail = SignatureDetail::Full, + signatureInfo_detail: SignatureDetail = SignatureDetail::Full, + /// Show documentation. - signatureInfo_documentation_enable: bool = true, + signatureInfo_documentation_enable: bool = true, /// Specify the characters allowed to invoke special on typing triggers. 
- /// - typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression + /// + /// - typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing + /// expression /// - typing `=` between two expressions adds `;` when in statement position - /// - typing `=` to turn an assignment into an equality comparison removes `;` when in expression position + /// - typing `=` to turn an assignment into an equality comparison removes `;` when in + /// expression position /// - typing `.` in a chain method call auto-indents - /// - typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression + /// - typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the + /// expression /// - typing `{` in a use item adds a closing `}` in the right place /// - typing `>` to complete a return type `->` will insert a whitespace after it /// - typing `<` in a path or type position inserts a closing `>` after the path or type. @@ -374,8 +468,8 @@ config_data! { /// /// **Warning**: This format is provisional and subject to change. /// - /// [`DiscoverWorkspaceConfig::command`] *must* return a JSON object - /// corresponding to `DiscoverProjectData::Finished`: + /// [`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to + /// `DiscoverProjectData::Finished`: /// /// ```norun /// #[derive(Debug, Clone, Deserialize, Serialize)] @@ -405,12 +499,11 @@ config_data! { /// } /// ``` /// - /// It is encouraged, but not required, to use the other variants on - /// `DiscoverProjectData` to provide a more polished end-user experience. + /// It is encouraged, but not required, to use the other variants on `DiscoverProjectData` + /// to provide a more polished end-user experience. /// - /// `DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, - /// which will be substituted with the JSON-serialized form of the following - /// enum: + /// `DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be + /// substituted with the JSON-serialized form of the following enum: /// /// ```norun /// #[derive(PartialEq, Clone, Debug, Serialize)] @@ -437,11 +530,10 @@ config_data! { /// } /// ``` /// - /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, - /// and therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to - /// to update an existing workspace. As a reference for implementors, - /// buck2's `rust-project` will likely be useful: - /// https://github.com/facebook/buck2/tree/main/integrations/rust-project. + /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and + /// therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an + /// existing workspace. As a reference for implementors, buck2's `rust-project` will likely + /// be useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project. workspace_discoverConfig: Option = None, } } @@ -449,109 +541,154 @@ config_data! { config_data! { /// Local configurations can be defined per `SourceRoot`. This almost always corresponds to a `Crate`. local: struct LocalDefaultConfigData <- LocalConfigInput -> { - /// Whether to insert #[must_use] when generating `as_` methods - /// for enum variants. - assist_emitMustUse: bool = false, + /// Insert #[must_use] when generating `as_` methods for enum variants. 
+ assist_emitMustUse: bool = false, + /// Placeholder expression to use for missing expressions in assists. - assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, - /// When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible. + assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, + + /// Prefer to use `Self` over the type name when inserting a type (e.g. in "fill match arms" assist). assist_preferSelf: bool = false, - /// Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check. + + /// Enable borrow checking for term search code assists. If set to false, also there will be + /// more suggestions, but some of them may not borrow-check. assist_termSearch_borrowcheck: bool = true, + /// Term search fuel in "units of work" for assists (Defaults to 1800). assist_termSearch_fuel: usize = 1800, - - /// Whether to automatically add a semicolon when completing unit-returning functions. + /// Automatically add a semicolon when completing unit-returning functions. /// /// In `match` arms it completes a comma instead. completion_addSemicolonToUnit: bool = true, - /// Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future. - completion_autoAwait_enable: bool = true, - /// Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them. - completion_autoIter_enable: bool = true, - /// Toggles the additional completions that automatically add imports when completed. - /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. - completion_autoimport_enable: bool = true, + + /// Show method calls and field accesses completions with `await` prefixed to them when + /// completing on a future. + completion_autoAwait_enable: bool = true, + + /// Show method call completions with `iter()` or `into_iter()` prefixed to them when + /// completing on a type that has them. + completion_autoIter_enable: bool = true, + + /// Show completions that automatically add imports when completed. + /// + /// Note that your client must specify the `additionalTextEdits` LSP client capability to + /// truly have this feature enabled. + completion_autoimport_enable: bool = true, + /// A list of full paths to items to exclude from auto-importing completions. /// /// Traits in this list won't have their methods suggested in completions unless the trait /// is in scope. /// - /// You can either specify a string path which defaults to type "always" or use the more verbose - /// form `{ "path": "path::to::item", type: "always" }`. + /// You can either specify a string path which defaults to type "always" or use the more + /// verbose form `{ "path": "path::to::item", type: "always" }`. /// - /// For traits the type "methods" can be used to only exclude the methods but not the trait itself. + /// For traits the type "methods" can be used to only exclude the methods but not the trait + /// itself. /// /// This setting also inherits `#rust-analyzer.completion.excludeTraits#`. 
completion_autoimport_exclude: Vec = vec![ AutoImportExclusion::Verbose { path: "core::borrow::Borrow".to_owned(), r#type: AutoImportExclusionType::Methods }, AutoImportExclusion::Verbose { path: "core::borrow::BorrowMut".to_owned(), r#type: AutoImportExclusionType::Methods }, ], - /// Toggles the additional completions that automatically show method calls and field accesses - /// with `self` prefixed to them when inside a method. - completion_autoself_enable: bool = true, - /// Whether to add parenthesis and argument snippets when completing function. - completion_callable_snippets: CallableCompletionDef = CallableCompletionDef::FillArguments, + + /// Show method calls and field access completions with `self` prefixed to them when + /// inside a method. + completion_autoself_enable: bool = true, + + /// Add parenthesis and argument snippets when completing function. + completion_callable_snippets: CallableCompletionDef = CallableCompletionDef::FillArguments, + /// A list of full paths to traits whose methods to exclude from completion. /// - /// Methods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`. + /// Methods from these traits won't be completed, even if the trait is in scope. However, + /// they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or + /// `T where T: Trait`. /// /// Note that the trait themselves can still be completed. completion_excludeTraits: Vec = Vec::new(), - /// Whether to show full function/method signatures in completion docs. + + /// Show full function / method signatures in completion docs. completion_fullFunctionSignatures_enable: bool = false, - /// Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden. + + /// Omit deprecated items from completions. By default they are marked as deprecated but not + /// hidden. completion_hideDeprecated: bool = false, + /// Maximum number of completions to return. If `None`, the limit is infinite. completion_limit: Option = None, - /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. - completion_postfix_enable: bool = true, - /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. + + /// Show postfix snippets like `dbg`, `if`, `not`, etc. + completion_postfix_enable: bool = true, + + /// Show completions of private items and fields that are defined in the current workspace + /// even if they are not visible at the current position. completion_privateEditable_enable: bool = false, - /// Whether to enable term search based snippets like `Some(foo.bar().baz())`. + + /// Enable term search based snippets like `Some(foo.bar().baz())`. completion_termSearch_enable: bool = false, + /// Term search fuel in "units of work" for autocompletion (Defaults to 1000). completion_termSearch_fuel: usize = 1000, /// List of rust-analyzer diagnostics to disable. diagnostics_disabled: FxHashSet = FxHashSet::default(), - /// Whether to show native rust-analyzer diagnostics. - diagnostics_enable: bool = true, - /// Whether to show experimental rust-analyzer diagnostics that might - /// have more false positives than usual. - diagnostics_experimental_enable: bool = false, - /// Map of prefixes to be substituted when parsing diagnostic file paths. 
- /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. + + /// Show native rust-analyzer diagnostics. + diagnostics_enable: bool = true, + + /// Show experimental rust-analyzer diagnostics that might have more false positives than + /// usual. + diagnostics_experimental_enable: bool = false, + + /// Map of prefixes to be substituted when parsing diagnostic file paths. This should be the + /// reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. diagnostics_remapPrefix: FxHashMap = FxHashMap::default(), - /// Whether to run additional style lints. - diagnostics_styleLints_enable: bool = false, + + /// Run additional style lints. + diagnostics_styleLints_enable: bool = false, + /// List of warnings that should be displayed with hint severity. /// - /// The warnings will be indicated by faded text or three dots in code - /// and will not show up in the `Problems Panel`. + /// The warnings will be indicated by faded text or three dots in code and will not show up + /// in the `Problems Panel`. diagnostics_warningsAsHint: Vec = vec![], + /// List of warnings that should be displayed with info severity. /// - /// The warnings will be indicated by a blue squiggly underline in code - /// and a blue icon in the `Problems Panel`. + /// The warnings will be indicated by a blue squiggly underline in code and a blue icon in + /// the `Problems Panel`. diagnostics_warningsAsInfo: Vec = vec![], - /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. - imports_granularity_enforce: bool = false, + /// Enforce the import granularity setting for all files. If set to false rust-analyzer will + /// try to keep import styles consistent per file. + imports_granularity_enforce: bool = false, + /// How imports should be grouped into use statements. - imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate, - /// Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines. - imports_group_enable: bool = true, - /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. - imports_merge_glob: bool = true, + imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate, + + /// Group inserted imports by the [following + /// order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are + /// separated by newlines. + imports_group_enable: bool = true, + + /// Allow import insertion to merge new imports into single path glob imports like `use + /// std::fmt::*;`. + imports_merge_glob: bool = true, + /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate. imports_preferNoStd | imports_prefer_no_std: bool = false, - /// Whether to prefer import paths containing a `prelude` module. - imports_preferPrelude: bool = false, + + /// Prefer import paths containing a `prelude` module. + imports_preferPrelude: bool = false, + /// The path structure for newly inserted paths to use. - imports_prefix: ImportPrefixDef = ImportPrefixDef::ByCrate, - /// Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;". + imports_prefix: ImportPrefixDef = ImportPrefixDef::ByCrate, + + /// Prefix external (including std, core) crate imports with `::`. + /// + /// E.g. `use ::std::io::Read;`. 
imports_prefixExternPrelude: bool = false, } } @@ -589,7 +726,9 @@ config_data! { /// ```bash /// cargo check --quiet --workspace --message-format=json --all-targets --keep-going /// ``` - /// . + /// + /// Note: The option must be specified as an array of command line arguments, with + /// the first argument being the name of the command to run. cargo_buildScripts_overrideCommand: Option> = None, /// Rerun proc-macros building/build-scripts running when proc-macro /// or build-script sources change and are saved. @@ -703,7 +842,9 @@ config_data! { /// ```bash /// cargo check --workspace --message-format=json --all-targets /// ``` - /// . + /// + /// Note: The option must be specified as an array of command line arguments, with + /// the first argument being the name of the command to run. check_overrideCommand | checkOnSave_overrideCommand: Option> = None, /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. /// @@ -753,6 +894,9 @@ config_data! { /// not that of `cargo fmt`. The file contents will be passed on the /// standard input and the formatted result will be read from the /// standard output. + /// + /// Note: The option must be specified as an array of command line arguments, with + /// the first argument being the name of the command to run. rustfmt_overrideCommand: Option> = None, /// Enables the use of rustfmt's unstable range formatting command for the /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only @@ -3201,8 +3345,10 @@ fn schema(fields: &[SchemaField]) -> serde_json::Value { .iter() .map(|(field, ty, doc, default)| { let name = field.replace('_', "."); - let category = - name.find('.').map(|end| String::from(&name[..end])).unwrap_or("general".into()); + let category = name + .split_once(".") + .map(|(category, _name)| to_title_case(category)) + .unwrap_or("rust-analyzer".into()); let name = format!("rust-analyzer.{name}"); let props = field_props(field, ty, doc, default); serde_json::json!({ @@ -3216,6 +3362,29 @@ fn schema(fields: &[SchemaField]) -> serde_json::Value { map.into() } +/// Translate a field name to a title case string suitable for use in the category names on the +/// vscode settings page. +/// +/// First letter of word should be uppercase, if an uppercase letter is encountered, add a space +/// before it e.g. "fooBar" -> "Foo Bar", "fooBarBaz" -> "Foo Bar Baz", "foo" -> "Foo" +/// +/// This likely should be in stdx (or just use heck instead), but it doesn't handle any edge cases +/// and is intentionally simple. 
+fn to_title_case(s: &str) -> String { + let mut result = String::with_capacity(s.len()); + let mut chars = s.chars(); + if let Some(first) = chars.next() { + result.push(first.to_ascii_uppercase()); + for c in chars { + if c.is_uppercase() { + result.push(' '); + } + result.push(c); + } + } + result +} + fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value { let doc = doc_comment_to_string(doc); let doc = doc.trim_end_matches('\n'); @@ -3742,17 +3911,16 @@ mod tests { for idx in url_offsets { let link = &schema[idx..]; // matching on whitespace to ignore normal links - if let Some(link_end) = link.find([' ', '[']) { - if link.chars().nth(link_end) == Some('[') { - if let Some(link_text_end) = link.find(']') { - let link_text = link[link_end..(link_text_end + 1)].to_string(); - - schema.replace_range((idx + link_end)..(idx + link_text_end + 1), ""); - schema.insert(idx, '('); - schema.insert(idx + link_end + 1, ')'); - schema.insert_str(idx, &link_text); - } - } + if let Some(link_end) = link.find([' ', '[']) + && link.chars().nth(link_end) == Some('[') + && let Some(link_text_end) = link.find(']') + { + let link_text = link[link_end..(link_text_end + 1)].to_string(); + + schema.replace_range((idx + link_end)..(idx + link_text_end + 1), ""); + schema.insert(idx, '('); + schema.insert(idx + link_end + 1, ')'); + schema.insert_str(idx, &link_text); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs index 95857dd8f3b4b..389bb7848c01c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs @@ -73,19 +73,19 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { } // completion.snippets -> completion.snippets.custom; - if let Some(Value::Object(obj)) = copy.pointer("/completion/snippets").cloned() { - if obj.len() != 1 || obj.get("custom").is_none() { - merge( - json, - json! {{ - "completion": { - "snippets": { - "custom": obj - }, + if let Some(Value::Object(obj)) = copy.pointer("/completion/snippets").cloned() + && (obj.len() != 1 || obj.get("custom").is_none()) + { + merge( + json, + json! 
{{ + "completion": { + "snippets": { + "custom": obj }, - }}, - ); - } + }, + }}, + ); } // callInfo_full -> signatureInfo_detail, signatureInfo_documentation_enable diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs index 79d8f678de4d6..3f64628de8606 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -298,10 +298,10 @@ pub(crate) fn map_rust_diagnostic_to_lsp( let mut source = String::from("rustc"); let mut code = rd.code.as_ref().map(|c| c.code.clone()); - if let Some(code_val) = &code { - if config.check_ignore.contains(code_val) { - return Vec::new(); - } + if let Some(code_val) = &code + && config.check_ignore.contains(code_val) + { + return Vec::new(); } if let Some(code_val) = &code { @@ -373,10 +373,8 @@ pub(crate) fn map_rust_diagnostic_to_lsp( let primary_location = primary_location(config, workspace_root, primary_span, snap); let message = { let mut message = message.clone(); - if needs_primary_span_label { - if let Some(primary_span_label) = &primary_span.label { - format_to!(message, "\n{}", primary_span_label); - } + if needs_primary_span_label && let Some(primary_span_label) = &primary_span.label { + format_to!(message, "\n{}", primary_span_label); } message }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 512ce0b9de354..e4e0bcdc1cd08 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -492,12 +492,11 @@ impl FlycheckActor { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { let mut cmd = toolchain::command(Tool::Cargo.path(), &*self.root, &options.extra_env); - if let Some(sysroot_root) = &self.sysroot_root { - if !options.extra_env.contains_key("RUSTUP_TOOLCHAIN") - && std::env::var_os("RUSTUP_TOOLCHAIN").is_none() - { - cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); - } + if let Some(sysroot_root) = &self.sysroot_root + && !options.extra_env.contains_key("RUSTUP_TOOLCHAIN") + && std::env::var_os("RUSTUP_TOOLCHAIN").is_none() + { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); } cmd.arg(command); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 62a28a1a685d4..2f1afba3634ef 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -448,7 +448,7 @@ impl GlobalState { tracing::info!(%vfs_path, ?change_kind, "Processing rust-analyzer.toml changes"); if vfs_path.as_path() == user_config_abs_path { tracing::info!(%vfs_path, ?change_kind, "Use config rust-analyzer.toml changes"); - change.change_user_config(Some(db.file_text(file_id).text(db))); + change.change_user_config(Some(db.file_text(file_id).text(db).clone())); } // If change has been made to a ratoml file that @@ -462,14 +462,14 @@ impl GlobalState { change.change_workspace_ratoml( source_root_id, vfs_path.clone(), - Some(db.file_text(file_id).text(db)), + Some(db.file_text(file_id).text(db).clone()), ) } else { tracing::info!(%vfs_path, ?source_root_id, "crate rust-analyzer.toml changes"); change.change_ratoml( source_root_id, vfs_path.clone(), - 
Some(db.file_text(file_id).text(db)), + Some(db.file_text(file_id).text(db).clone()), ) }; @@ -591,10 +591,10 @@ impl GlobalState { pub(crate) fn respond(&mut self, response: lsp_server::Response) { if let Some((method, start)) = self.req_queue.incoming.complete(&response.id) { - if let Some(err) = &response.error { - if err.message.starts_with("server panicked") { - self.poke_rust_analyzer_developer(format!("{}, check the log", err.message)); - } + if let Some(err) = &response.error + && err.message.starts_with("server panicked") + { + self.poke_rust_analyzer_developer(format!("{}, check the log", err.message)); } let duration = start.elapsed(); @@ -663,18 +663,18 @@ impl GlobalState { pub(crate) fn check_workspaces_msrv(&self) -> impl Iterator + '_ { self.workspaces.iter().filter_map(|ws| { - if let Some(toolchain) = &ws.toolchain { - if *toolchain < crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION { - return Some(format!( - "Workspace `{}` is using an outdated toolchain version `{}` but \ + if let Some(toolchain) = &ws.toolchain + && *toolchain < crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION + { + return Some(format!( + "Workspace `{}` is using an outdated toolchain version `{}` but \ rust-analyzer only supports `{}` and higher.\n\ Consider using the rust-analyzer rustup component for your toolchain or upgrade your toolchain to a supported version.\n\n", - ws.manifest_or_root(), - toolchain, - crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION, - )); - } + ws.manifest_or_root(), + toolchain, + crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION, + )); } None }) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index aea116e647db8..b25245dd884a4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -433,10 +433,10 @@ impl NotificationDispatcher<'_> { } pub(crate) fn finish(&mut self) { - if let Some(not) = &self.not { - if !not.method.starts_with("$/") { - tracing::error!("unhandled notification: {:?}", not); - } + if let Some(not) = &self.not + && !not.method.starts_with("$/") + { + tracing::error!("unhandled notification: {:?}", not); } } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 200e972e42897..e193ff77743d1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -39,14 +39,12 @@ pub(crate) fn handle_work_done_progress_cancel( state: &mut GlobalState, params: WorkDoneProgressCancelParams, ) -> anyhow::Result<()> { - if let lsp_types::NumberOrString::String(s) = ¶ms.token { - if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") { - if let Ok(id) = id.parse::() { - if let Some(flycheck) = state.flycheck.get(id as usize) { - flycheck.cancel(); - } - } - } + if let lsp_types::NumberOrString::String(s) = ¶ms.token + && let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") + && let Ok(id) = id.parse::() + && let Some(flycheck) = state.flycheck.get(id as usize) + { + flycheck.cancel(); } // Just ignore this. 
It is OK to continue sending progress @@ -76,12 +74,12 @@ pub(crate) fn handle_did_open_text_document( tracing::error!("duplicate DidOpenTextDocument: {}", path); } - if let Some(abs_path) = path.as_path() { - if state.config.excluded().any(|excluded| abs_path.starts_with(&excluded)) { - tracing::trace!("opened excluded file {abs_path}"); - state.vfs.write().0.insert_excluded_file(path); - return Ok(()); - } + if let Some(abs_path) = path.as_path() + && state.config.excluded().any(|excluded| abs_path.starts_with(&excluded)) + { + tracing::trace!("opened excluded file {abs_path}"); + state.vfs.write().0.insert_excluded_file(path); + return Ok(()); } let contents = params.text_document.text.into_bytes(); @@ -449,12 +447,11 @@ pub(crate) fn handle_run_flycheck( params: RunFlycheckParams, ) -> anyhow::Result<()> { let _p = tracing::info_span!("handle_run_flycheck").entered(); - if let Some(text_document) = params.text_document { - if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) { - if run_flycheck(state, vfs_path) { - return Ok(()); - } - } + if let Some(text_document) = params.text_document + && let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) + && run_flycheck(state, vfs_path) + { + return Ok(()); } // No specific flycheck was triggered, so let's trigger all of them. if state.config.flycheck_workspace(None) { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index a76a65220d3b0..25c0aac405e79 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -973,14 +973,13 @@ pub(crate) fn handle_runnables( res.push(runnable); } - if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args { - if let Some(TargetSpec::Cargo(CargoTargetSpec { + if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args + && let Some(TargetSpec::Cargo(CargoTargetSpec { sysroot_root: Some(sysroot_root), .. 
})) = &target_spec - { - r.environment.insert("RUSTC_TOOLCHAIN".to_owned(), sysroot_root.to_string()); - } + { + r.environment.insert("RUSTC_TOOLCHAIN".to_owned(), sysroot_root.to_string()); }; res.push(runnable); @@ -1034,25 +1033,25 @@ pub(crate) fn handle_runnables( } Some(TargetSpec::ProjectJson(_)) => {} None => { - if !snap.config.linked_or_discovered_projects().is_empty() { - if let Some(path) = snap.file_id_to_file_path(file_id).parent() { - let mut cargo_args = vec!["check".to_owned(), "--workspace".to_owned()]; - cargo_args.extend(config.cargo_extra_args.iter().cloned()); - res.push(lsp_ext::Runnable { - label: "cargo check --workspace".to_owned(), - location: None, - kind: lsp_ext::RunnableKind::Cargo, - args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { - workspace_root: None, - cwd: path.as_path().unwrap().to_path_buf().into(), - override_cargo: config.override_cargo, - cargo_args, - executable_args: Vec::new(), - environment: Default::default(), - }), - }); - }; - } + if !snap.config.linked_or_discovered_projects().is_empty() + && let Some(path) = snap.file_id_to_file_path(file_id).parent() + { + let mut cargo_args = vec!["check".to_owned(), "--workspace".to_owned()]; + cargo_args.extend(config.cargo_extra_args.iter().cloned()); + res.push(lsp_ext::Runnable { + label: "cargo check --workspace".to_owned(), + location: None, + kind: lsp_ext::RunnableKind::Cargo, + args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { + workspace_root: None, + cwd: path.as_path().unwrap().to_path_buf().into(), + override_cargo: config.override_cargo, + cargo_args, + executable_args: Vec::new(), + environment: Default::default(), + }), + }); + }; } } Ok(res) @@ -1557,12 +1556,12 @@ pub(crate) fn handle_code_action_resolve( code_action.edit = ca.edit; code_action.command = ca.command; - if let Some(edit) = code_action.edit.as_ref() { - if let Some(changes) = edit.document_changes.as_ref() { - for change in changes { - if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change { - resource_ops_supported(&snap.config, resolve_resource_op(res_op))? - } + if let Some(edit) = code_action.edit.as_ref() + && let Some(changes) = edit.document_changes.as_ref() + { + for change in changes { + if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change { + resource_ops_supported(&snap.config, resolve_resource_op(res_op))? } } } @@ -1958,12 +1957,11 @@ pub(crate) fn handle_semantic_tokens_full_delta( if let Some(cached_tokens @ lsp_types::SemanticTokens { result_id: Some(prev_id), .. 
}) = &cached_tokens + && *prev_id == params.previous_result_id { - if *prev_id == params.previous_result_id { - let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens); - snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens); - return Ok(Some(delta.into())); - } + let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens); + snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens); + return Ok(Some(delta.into())); } // Clone first to keep the lock short @@ -2122,24 +2120,25 @@ fn show_impl_command_link( snap: &GlobalStateSnapshot, position: &FilePosition, ) -> Option { - if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference { - if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) { - let uri = to_proto::url(snap, position.file_id); - let line_index = snap.file_line_index(position.file_id).ok()?; - let position = to_proto::position(&line_index, position.offset); - let locations: Vec<_> = nav_data - .info - .into_iter() - .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok()) - .collect(); - let title = to_proto::implementation_title(locations.len()); - let command = to_proto::command::show_references(title, &uri, position, locations); - - return Some(lsp_ext::CommandLinkGroup { - commands: vec![to_command_link(command, "Go to implementations".into())], - ..Default::default() - }); - } + if snap.config.hover_actions().implementations + && snap.config.client_commands().show_reference + && let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) + { + let uri = to_proto::url(snap, position.file_id); + let line_index = snap.file_line_index(position.file_id).ok()?; + let position = to_proto::position(&line_index, position.offset); + let locations: Vec<_> = nav_data + .info + .into_iter() + .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok()) + .collect(); + let title = to_proto::implementation_title(locations.len()); + let command = to_proto::command::show_references(title, &uri, position, locations); + + return Some(lsp_ext::CommandLinkGroup { + commands: vec![to_command_link(command, "Go to implementations".into())], + ..Default::default() + }); } None } @@ -2148,28 +2147,29 @@ fn show_ref_command_link( snap: &GlobalStateSnapshot, position: &FilePosition, ) -> Option { - if snap.config.hover_actions().references && snap.config.client_commands().show_reference { - if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) { - let uri = to_proto::url(snap, position.file_id); - let line_index = snap.file_line_index(position.file_id).ok()?; - let position = to_proto::position(&line_index, position.offset); - let locations: Vec<_> = ref_search_res - .into_iter() - .flat_map(|res| res.references) - .flat_map(|(file_id, ranges)| { - ranges.into_iter().map(move |(range, _)| FileRange { file_id, range }) - }) - .unique() - .filter_map(|range| to_proto::location(snap, range).ok()) - .collect(); - let title = to_proto::reference_title(locations.len()); - let command = to_proto::command::show_references(title, &uri, position, locations); - - return Some(lsp_ext::CommandLinkGroup { - commands: vec![to_command_link(command, "Go to references".into())], - ..Default::default() - }); - } + if snap.config.hover_actions().references + && snap.config.client_commands().show_reference + && let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) + 
{ + let uri = to_proto::url(snap, position.file_id); + let line_index = snap.file_line_index(position.file_id).ok()?; + let position = to_proto::position(&line_index, position.offset); + let locations: Vec<_> = ref_search_res + .into_iter() + .flat_map(|res| res.references) + .flat_map(|(file_id, ranges)| { + ranges.into_iter().map(move |(range, _)| FileRange { file_id, range }) + }) + .unique() + .filter_map(|range| to_proto::location(snap, range).ok()) + .collect(); + let title = to_proto::reference_title(locations.len()); + let command = to_proto::command::show_references(title, &uri, position, locations); + + return Some(lsp_ext::CommandLinkGroup { + commands: vec![to_command_link(command, "Go to references".into())], + ..Default::default() + }); } None } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 00cf890510d45..61c758d5e86e1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -501,14 +501,12 @@ impl GlobalState { } } - if self.config.cargo_autoreload_config(None) - || self.config.discover_workspace_config().is_some() - { - if let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = + if (self.config.cargo_autoreload_config(None) + || self.config.discover_workspace_config().is_some()) + && let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = self.fetch_workspaces_queue.should_start_op() - { - self.fetch_workspaces(cause, path, force_crate_graph_reload); - } + { + self.fetch_workspaces(cause, path, force_crate_graph_reload); } if !self.fetch_workspaces_queue.op_in_progress() { @@ -765,33 +763,33 @@ impl GlobalState { self.report_progress("Fetching", state, msg, None, None); } Task::DiscoverLinkedProjects(arg) => { - if let Some(cfg) = self.config.discover_workspace_config() { - if !self.discover_workspace_queue.op_in_progress() { - // the clone is unfortunately necessary to avoid a borrowck error when - // `self.report_progress` is called later - let title = &cfg.progress_label.clone(); - let command = cfg.command.clone(); - let discover = DiscoverCommand::new(self.discover_sender.clone(), command); - - self.report_progress(title, Progress::Begin, None, None, None); - self.discover_workspace_queue - .request_op("Discovering workspace".to_owned(), ()); - let _ = self.discover_workspace_queue.should_start_op(); - - let arg = match arg { - DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it), - DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it), - }; + if let Some(cfg) = self.config.discover_workspace_config() + && !self.discover_workspace_queue.op_in_progress() + { + // the clone is unfortunately necessary to avoid a borrowck error when + // `self.report_progress` is called later + let title = &cfg.progress_label.clone(); + let command = cfg.command.clone(); + let discover = DiscoverCommand::new(self.discover_sender.clone(), command); + + self.report_progress(title, Progress::Begin, None, None, None); + self.discover_workspace_queue + .request_op("Discovering workspace".to_owned(), ()); + let _ = self.discover_workspace_queue.should_start_op(); + + let arg = match arg { + DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it), + DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it), + }; - let handle = discover.spawn( - arg, - &std::env::current_dir() - .expect("Failed to get cwd during project 
discovery"), - ); - self.discover_handle = Some(handle.unwrap_or_else(|e| { - panic!("Failed to spawn project discovery command: {e}") - })); - } + let handle = discover.spawn( + arg, + &std::env::current_dir() + .expect("Failed to get cwd during project discovery"), + ); + self.discover_handle = Some(handle.unwrap_or_else(|e| { + panic!("Failed to spawn project discovery command: {e}") + })); } } Task::FetchBuildData(progress) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index e798aa6a8a606..aa38aa72d44eb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -306,13 +306,13 @@ impl GlobalState { _ => None, }); - if let Some(build) = build { - if is_quiescent { - let path = AbsPathBuf::try_from(build.build_file) - .expect("Unable to convert to an AbsPath"); - let arg = DiscoverProjectParam::Buildfile(path); - sender.send(Task::DiscoverLinkedProjects(arg)).unwrap(); - } + if let Some(build) = build + && is_quiescent + { + let path = AbsPathBuf::try_from(build.build_file) + .expect("Unable to convert to an AbsPath"); + let arg = DiscoverProjectParam::Buildfile(path); + sender.send(Task::DiscoverLinkedProjects(arg)).unwrap(); } } diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs index b81d08eed6d88..ae9e038459e50 100644 --- a/src/tools/rust-analyzer/crates/span/src/lib.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -203,15 +203,3 @@ pub struct HirFileId(pub salsa::Id); /// `println!("Hello, {}", world)`. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct MacroCallId(pub salsa::Id); - -/// Legacy span type, only defined here as it is still used by the proc-macro server. -/// While rust-analyzer doesn't use this anymore at all, RustRover relies on the legacy type for -/// proc-macro expansion. -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct TokenId(pub u32); - -impl std::fmt::Debug for TokenId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs index f58201793da28..bb09933536e71 100644 --- a/src/tools/rust-analyzer/crates/span/src/map.rs +++ b/src/tools/rust-analyzer/crates/span/src/map.rs @@ -41,13 +41,13 @@ where /// Pushes a new span onto the [`SpanMap`]. 
pub fn push(&mut self, offset: TextSize, span: SpanData) { - if cfg!(debug_assertions) { - if let Some(&(last_offset, _)) = self.spans.last() { - assert!( - last_offset < offset, - "last_offset({last_offset:?}) must be smaller than offset({offset:?})" - ); - } + if cfg!(debug_assertions) + && let Some(&(last_offset, _)) = self.spans.last() + { + assert!( + last_offset < offset, + "last_offset({last_offset:?}) must be smaller than offset({offset:?})" + ); } self.spans.push((offset, span)); } diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs index d59229952f527..bdff671802c2a 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs @@ -768,17 +768,17 @@ where } fn bump(&mut self) -> Option<(Self::Token, TextRange)> { - if let Some((punct, offset)) = self.punct_offset.clone() { - if usize::from(offset) + 1 < punct.text().len() { - let offset = offset + TextSize::of('.'); - let range = punct.text_range(); - self.punct_offset = Some((punct.clone(), offset)); - let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some(( - SynToken::Punct { token: punct, offset: u32::from(offset) as usize }, - range, - )); - } + if let Some((punct, offset)) = self.punct_offset.clone() + && usize::from(offset) + 1 < punct.text().len() + { + let offset = offset + TextSize::of('.'); + let range = punct.text_range(); + self.punct_offset = Some((punct.clone(), offset)); + let range = TextRange::at(range.start() + offset, TextSize::of('.')); + return Some(( + SynToken::Punct { token: punct, offset: u32::from(offset) as usize }, + range, + )); } if let Some(leaf) = self.current_leaves.pop() { diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs index 0a5c8df0d0aef..2f932e0458324 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs @@ -61,10 +61,11 @@ pub fn prettify_macro_expansion( } _ => continue, }; - if token.kind() == SyntaxKind::IDENT && token.text() == "$crate" { - if let Some(replacement) = dollar_crate_replacement(&token) { - dollar_crate_replacements.push((token.clone(), replacement)); - } + if token.kind() == SyntaxKind::IDENT + && token.text() == "$crate" + && let Some(replacement) = dollar_crate_replacement(&token) + { + dollar_crate_replacements.push((token.clone(), replacement)); } let tok = &token; diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs index 8871bf56a5df7..c8dc3131b59c6 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs @@ -34,14 +34,11 @@ fn check_punct_spacing(fixture: &str) { while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { if let tt::TokenTree::Leaf(Leaf::Punct(Punct { - spacing, - span: Span { range, .. }, - .. + spacing, span: Span { range, .. }, .. 
})) = token_tree + && let Some(expected) = annotations.remove(range) { - if let Some(expected) = annotations.remove(range) { - assert_eq!(expected, *spacing); - } + assert_eq!(expected, *spacing); } cursor.bump(); } diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs index 021dc6595f9b9..c0ff8e1db2c2d 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs @@ -21,17 +21,17 @@ pub fn to_parser_input( let tt = current.token_tree(); // Check if it is lifetime - if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tt { - if punct.char == '\'' { - current.bump(); - match current.token_tree() { - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(_ident))) => { - res.push(LIFETIME_IDENT); - current.bump(); - continue; - } - _ => panic!("Next token must be ident"), + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tt + && punct.char == '\'' + { + current.bump(); + match current.token_tree() { + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(_ident))) => { + res.push(LIFETIME_IDENT); + current.bump(); + continue; } + _ => panic!("Next token must be ident"), } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs index d97fdec524fbb..9b30642fe4b08 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs @@ -89,11 +89,11 @@ impl IndentLevel { _ => None, }); for token in tokens { - if let Some(ws) = ast::Whitespace::cast(token) { - if ws.text().contains('\n') { - let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax())); - ted::replace(ws.syntax(), &new_ws); - } + if let Some(ws) = ast::Whitespace::cast(token) + && ws.text().contains('\n') + { + let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax())); + ted::replace(ws.syntax(), &new_ws); } } } @@ -122,13 +122,13 @@ impl IndentLevel { _ => None, }); for token in tokens { - if let Some(ws) = ast::Whitespace::cast(token) { - if ws.text().contains('\n') { - let new_ws = make::tokens::whitespace( - &ws.syntax().text().replace(&format!("\n{self}"), "\n"), - ); - ted::replace(ws.syntax(), &new_ws); - } + if let Some(ws) = ast::Whitespace::cast(token) + && ws.text().contains('\n') + { + let new_ws = make::tokens::whitespace( + &ws.syntax().text().replace(&format!("\n{self}"), "\n"), + ); + ted::replace(ws.syntax(), &new_ws); } } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index 28b543ea70644..b50ce6442432d 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -9,11 +9,11 @@ use crate::{ SyntaxKind::{ATTR, COMMENT, WHITESPACE}, SyntaxNode, SyntaxToken, algo::{self, neighbor}, - ast::{self, HasGenericArgs, HasGenericParams, edit::IndentLevel, make}, + ast::{self, HasGenericParams, edit::IndentLevel, make}, ted::{self, Position}, }; -use super::{GenericParam, HasArgList, HasName}; +use super::{GenericParam, HasName}; pub trait GenericParamsOwnerEdit: ast::HasGenericParams { fn get_or_create_generic_param_list(&self) -> ast::GenericParamList; @@ -383,10 +383,10 @@ impl ast::GenericParamList { impl ast::WhereClause { pub fn add_predicate(&self, predicate: ast::WherePred) { - if let Some(pred) = 
self.predicates().last() { - if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) { - ted::append_child_raw(self.syntax(), make::token(T![,])); - } + if let Some(pred) = self.predicates().last() + && !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) + { + ted::append_child_raw(self.syntax(), make::token(T![,])); } ted::append_child(self.syntax(), predicate.syntax()); } @@ -419,34 +419,6 @@ impl Removable for ast::TypeBoundList { } } -impl ast::PathSegment { - pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList { - if self.generic_arg_list().is_none() { - let arg_list = make::generic_arg_list(empty()).clone_for_update(); - ted::append_child(self.syntax(), arg_list.syntax()); - } - self.generic_arg_list().unwrap() - } -} - -impl ast::MethodCallExpr { - pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList { - if self.generic_arg_list().is_none() { - let generic_arg_list = make::turbofish_generic_arg_list(empty()).clone_for_update(); - - if let Some(arg_list) = self.arg_list() { - ted::insert_raw( - ted::Position::before(arg_list.syntax()), - generic_arg_list.syntax(), - ); - } else { - ted::append_child(self.syntax(), generic_arg_list.syntax()); - } - } - self.generic_arg_list().unwrap() - } -} - impl Removable for ast::UseTree { fn remove(&self) { for dir in [Direction::Next, Direction::Prev] { @@ -677,106 +649,6 @@ impl ast::AssocItemList { ]; ted::insert_all(position, elements); } - - /// Adds a new associated item at the start of the associated item list. - /// - /// Attention! This function does align the first line of `item` with respect to `self`, - /// but it does _not_ change indentation of other lines (if any). - pub fn add_item_at_start(&self, item: ast::AssocItem) { - match self.assoc_items().next() { - Some(first_item) => { - let indent = IndentLevel::from_node(first_item.syntax()); - let before = Position::before(first_item.syntax()); - - ted::insert_all( - before, - vec![ - item.syntax().clone().into(), - make::tokens::whitespace(&format!("\n\n{indent}")).into(), - ], - ) - } - None => { - let (indent, position, whitespace) = match self.l_curly_token() { - Some(l_curly) => { - normalize_ws_between_braces(self.syntax()); - (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") - } - None => (IndentLevel::single(), Position::first_child_of(self.syntax()), ""), - }; - - let mut elements = vec![]; - - // Avoid pushing an empty whitespace token - if !indent.is_zero() || !whitespace.is_empty() { - elements.push(make::tokens::whitespace(&format!("{whitespace}{indent}")).into()) - } - elements.push(item.syntax().clone().into()); - - ted::insert_all(position, elements) - } - }; - } -} - -impl ast::Fn { - pub fn get_or_create_body(&self) -> ast::BlockExpr { - if self.body().is_none() { - let body = make::ext::empty_block_expr().clone_for_update(); - match self.semicolon_token() { - Some(semi) => { - ted::replace(semi, body.syntax()); - ted::insert(Position::before(body.syntax), make::tokens::single_space()); - } - None => ted::append_child(self.syntax(), body.syntax()), - } - } - self.body().unwrap() - } -} - -impl ast::LetStmt { - pub fn set_ty(&self, ty: Option) { - match ty { - None => { - if let Some(colon_token) = self.colon_token() { - ted::remove(colon_token); - } - - if let Some(existing_ty) = self.ty() { - if let Some(sibling) = existing_ty.syntax().prev_sibling_or_token() { - if sibling.kind() == SyntaxKind::WHITESPACE { - ted::remove(sibling); - } - } - - 
ted::remove(existing_ty.syntax()); - } - - // Remove any trailing ws - if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) - { - last.detach(); - } - } - Some(new_ty) => { - if self.colon_token().is_none() { - ted::insert_raw( - Position::after( - self.pat().expect("let stmt should have a pattern").syntax(), - ), - make::token(T![:]), - ); - } - - if let Some(old_ty) = self.ty() { - ted::replace(old_ty.syntax(), new_ty.syntax()); - } else { - ted::insert(Position::after(self.colon_token().unwrap()), new_ty.syntax()); - } - } - } - } } impl ast::RecordExprFieldList { @@ -823,19 +695,18 @@ impl ast::RecordExprField { return; } // this is a shorthand - if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() { - if let Some(path) = path_expr.path() { - if let Some(name_ref) = path.as_single_name_ref() { - path_expr.syntax().detach(); - let children = vec![ - name_ref.syntax().clone().into(), - ast::make::token(T![:]).into(), - ast::make::tokens::single_space().into(), - expr.syntax().clone().into(), - ]; - ted::insert_all_raw(Position::last_child_of(self.syntax()), children); - } - } + if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() + && let Some(path) = path_expr.path() + && let Some(name_ref) = path.as_single_name_ref() + { + path_expr.syntax().detach(); + let children = vec![ + name_ref.syntax().clone().into(), + ast::make::token(T![:]).into(), + ast::make::tokens::single_space().into(), + expr.syntax().clone().into(), + ]; + ted::insert_all_raw(Position::last_child_of(self.syntax()), children); } } } @@ -1092,35 +963,4 @@ mod tests { check("let a @ ()", "let a", None); check("let a @ ", "let a", None); } - - #[test] - fn test_let_stmt_set_ty() { - #[track_caller] - fn check(before: &str, expected: &str, ty: Option) { - let ty = ty.map(|it| it.clone_for_update()); - - let let_stmt = ast_mut_from_text::(&format!("fn f() {{ {before} }}")); - let_stmt.set_ty(ty); - - let after = ast_mut_from_text::(&format!("fn f() {{ {expected} }}")); - assert_eq!(let_stmt.to_string(), after.to_string(), "{let_stmt:#?}\n!=\n{after:#?}"); - } - - // adding - check("let a;", "let a: ();", Some(make::ty_tuple([]))); - // no semicolon due to it being eaten during error recovery - check("let a:", "let a: ()", Some(make::ty_tuple([]))); - - // replacing - check("let a: u8;", "let a: ();", Some(make::ty_tuple([]))); - check("let a: u8 = 3;", "let a: () = 3;", Some(make::ty_tuple([]))); - check("let a: = 3;", "let a: () = 3;", Some(make::ty_tuple([]))); - - // removing - check("let a: u8;", "let a;", None); - check("let a:;", "let a;", None); - - check("let a: u8 = 3;", "let a = 3;", None); - check("let a: = 3;", "let a = 3;", None); - } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 2a7b51c3c2481..daeb79cf081dc 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -229,9 +229,7 @@ pub fn ty_fn_ptr>( } } -pub fn assoc_item_list( - body: Option>>, -) -> ast::AssocItemList { +pub fn assoc_item_list(body: Option>) -> ast::AssocItemList { let is_break_braces = body.is_some(); let body_newline = if is_break_braces { "\n".to_owned() } else { String::new() }; let body_indent = if is_break_braces { " ".to_owned() } else { String::new() }; diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs index 00750bff0ba20..1364adb187fcc 100644 --- 
a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs @@ -276,19 +276,19 @@ impl Expr { } // Not every expression can be followed by `else` in the `let-else` - if let Some(ast::Stmt::LetStmt(e)) = stmt { - if e.let_else().is_some() { - match self { - BinExpr(e) - if e.op_kind() - .map(|op| matches!(op, BinaryOp::LogicOp(_))) - .unwrap_or(false) => - { - return true; - } - _ if self.clone().trailing_brace().is_some() => return true, - _ => {} + if let Some(ast::Stmt::LetStmt(e)) = stmt + && e.let_else().is_some() + { + match self { + BinExpr(e) + if e.op_kind() + .map(|op| matches!(op, BinaryOp::LogicOp(_))) + .unwrap_or(false) => + { + return true; } + _ if self.clone().trailing_brace().is_some() => return true, + _ => {} } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs index 1ba6107315126..738a26fed5d82 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs @@ -939,6 +939,24 @@ impl SyntaxFactory { ast } + pub fn record_expr( + &self, + path: ast::Path, + fields: ast::RecordExprFieldList, + ) -> ast::RecordExpr { + let ast = make::record_expr(path.clone(), fields.clone()).clone_for_update(); + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone()); + builder.map_node( + fields.syntax().clone(), + ast.record_expr_field_list().unwrap().syntax().clone(), + ); + builder.finish(&mut mapping); + } + ast + } + pub fn record_expr_field( &self, name: ast::NameRef, diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs index 5107754b18257..18f5015e9eabd 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs @@ -83,6 +83,16 @@ impl SyntaxEditor { self.changes.push(Change::Replace(element.syntax_element(), None)); } + pub fn delete_all(&mut self, range: RangeInclusive) { + if range.start() == range.end() { + self.delete(range.start()); + return; + } + + debug_assert!(is_ancestor_or_self_of_element(range.start(), &self.root)); + self.changes.push(Change::ReplaceAll(range, Vec::new())) + } + pub fn replace(&mut self, old: impl Element, new: impl Element) { let old = old.syntax_element(); debug_assert!(is_ancestor_or_self_of_element(&old, &self.root)); @@ -626,10 +636,10 @@ mod tests { if let Some(ret_ty) = parent_fn.ret_type() { editor.delete(ret_ty.syntax().clone()); - if let Some(SyntaxElement::Token(token)) = ret_ty.syntax().next_sibling_or_token() { - if token.kind().is_trivia() { - editor.delete(token); - } + if let Some(SyntaxElement::Token(token)) = ret_ty.syntax().next_sibling_or_token() + && token.kind().is_trivia() + { + editor.delete(token); } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs index 840e76979792d..9090f7c9eb149 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs @@ -153,6 +153,23 @@ impl ast::VariantList { } } +impl ast::Fn { + pub fn replace_or_insert_body(&self, editor: &mut SyntaxEditor, 
body: ast::BlockExpr) { + if let Some(old_body) = self.body() { + editor.replace(old_body.syntax(), body.syntax()); + } else { + let single_space = make::tokens::single_space(); + let elements = vec![single_space.into(), body.syntax().clone().into()]; + + if let Some(semicolon) = self.semicolon_token() { + editor.replace_with_many(semicolon, elements); + } else { + editor.insert_all(Position::last_child_of(self.syntax()), elements); + } + } + } +} + fn normalize_ws_between_braces(editor: &mut SyntaxEditor, node: &SyntaxNode) -> Option<()> { let make = SyntaxFactory::without_mappings(); let l = node @@ -184,6 +201,15 @@ pub trait Removable: AstNode { fn remove(&self, editor: &mut SyntaxEditor); } +impl Removable for ast::TypeBoundList { + fn remove(&self, editor: &mut SyntaxEditor) { + match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) { + Some(colon) => editor.delete_all(colon..=self.syntax().clone().into()), + None => editor.delete(self.syntax()), + } + } +} + impl Removable for ast::Use { fn remove(&self, editor: &mut SyntaxEditor) { let make = SyntaxFactory::without_mappings(); diff --git a/src/tools/rust-analyzer/crates/syntax/src/ted.rs b/src/tools/rust-analyzer/crates/syntax/src/ted.rs index 6fcbdd006c244..5c286479c4e3d 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ted.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ted.rs @@ -90,15 +90,15 @@ pub fn insert_raw(position: Position, elem: impl Element) { insert_all_raw(position, vec![elem.syntax_element()]); } pub fn insert_all(position: Position, mut elements: Vec) { - if let Some(first) = elements.first() { - if let Some(ws) = ws_before(&position, first) { - elements.insert(0, ws.into()); - } + if let Some(first) = elements.first() + && let Some(ws) = ws_before(&position, first) + { + elements.insert(0, ws.into()); } - if let Some(last) = elements.last() { - if let Some(ws) = ws_after(&position, last) { - elements.push(ws.into()); - } + if let Some(last) = elements.last() + && let Some(ws) = ws_after(&position, last) + { + elements.push(ws.into()); } insert_all_raw(position, elements); } @@ -165,20 +165,22 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option { PositionRepr::After(it) => it, }; - if prev.kind() == T!['{'] && new.kind() == SyntaxKind::USE { - if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) { - let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into()); - indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{indent}"))); - } + if prev.kind() == T!['{'] + && new.kind() == SyntaxKind::USE + && let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) + { + let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into()); + indent.0 += 1; + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } - if prev.kind() == T!['{'] && ast::Stmt::can_cast(new.kind()) { - if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) { - let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into()); - indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{indent}"))); - } + if prev.kind() == T!['{'] + && ast::Stmt::can_cast(new.kind()) + && let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) + { + let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into()); + indent.0 += 1; + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } ws_between(prev, new) diff --git 
a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs index 4180f9cd18550..485140be8f69c 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/validation.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs @@ -142,50 +142,50 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec) { match literal.kind() { ast::LiteralKind::String(s) => { - if !s.is_raw() { - if let Some(without_quotes) = unquote(text, 1, '"') { - unescape_str(without_quotes, |range, char| { - if let Err(err) = char { - push_err(1, range.start, err); - } - }); - } + if !s.is_raw() + && let Some(without_quotes) = unquote(text, 1, '"') + { + unescape_str(without_quotes, |range, char| { + if let Err(err) = char { + push_err(1, range.start, err); + } + }); } } ast::LiteralKind::ByteString(s) => { - if !s.is_raw() { - if let Some(without_quotes) = unquote(text, 2, '"') { - unescape_byte_str(without_quotes, |range, char| { - if let Err(err) = char { - push_err(1, range.start, err); - } - }); - } + if !s.is_raw() + && let Some(without_quotes) = unquote(text, 2, '"') + { + unescape_byte_str(without_quotes, |range, char| { + if let Err(err) = char { + push_err(1, range.start, err); + } + }); } } ast::LiteralKind::CString(s) => { - if !s.is_raw() { - if let Some(without_quotes) = unquote(text, 2, '"') { - unescape_c_str(without_quotes, |range, char| { - if let Err(err) = char { - push_err(1, range.start, err); - } - }); - } + if !s.is_raw() + && let Some(without_quotes) = unquote(text, 2, '"') + { + unescape_c_str(without_quotes, |range, char| { + if let Err(err) = char { + push_err(1, range.start, err); + } + }); } } ast::LiteralKind::Char(_) => { - if let Some(without_quotes) = unquote(text, 1, '\'') { - if let Err(err) = unescape_char(without_quotes) { - push_err(1, 0, err); - } + if let Some(without_quotes) = unquote(text, 1, '\'') + && let Err(err) = unescape_char(without_quotes) + { + push_err(1, 0, err); } } ast::LiteralKind::Byte(_) => { - if let Some(without_quotes) = unquote(text, 2, '\'') { - if let Err(err) = unescape_byte(without_quotes) { - push_err(2, 0, err); - } + if let Some(without_quotes) = unquote(text, 2, '\'') + && let Err(err) = unescape_byte(without_quotes) + { + push_err(2, 0, err); } } ast::LiteralKind::IntNumber(_) @@ -224,14 +224,14 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) { } fn validate_numeric_name(name_ref: Option, errors: &mut Vec) { - if let Some(int_token) = int_token(name_ref) { - if int_token.text().chars().any(|c| !c.is_ascii_digit()) { - errors.push(SyntaxError::new( - "Tuple (struct) field access is only allowed through \ + if let Some(int_token) = int_token(name_ref) + && int_token.text().chars().any(|c| !c.is_ascii_digit()) + { + errors.push(SyntaxError::new( + "Tuple (struct) field access is only allowed through \ decimal integers with no underscores or suffix", - int_token.text_range(), - )); - } + int_token.text_range(), + )); } fn int_token(name_ref: Option) -> Option { @@ -285,13 +285,13 @@ fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec Option { diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index 8937e53175abd..4413d2f222c15 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -955,12 +955,12 @@ impl ProcMacroExpander for DisallowCfgProcMacroExpander { _: String, ) -> Result { for tt in 
subtree.token_trees().flat_tokens() { - if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt { - if ident.sym == sym::cfg || ident.sym == sym::cfg_attr { - return Err(ProcMacroExpansionError::Panic( - "cfg or cfg_attr found in DisallowCfgProcMacroExpander".to_owned(), - )); - } + if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt + && (ident.sym == sym::cfg || ident.sym == sym::cfg_attr) + { + return Err(ProcMacroExpansionError::Panic( + "cfg or cfg_attr found in DisallowCfgProcMacroExpander".to_owned(), + )); } } Ok(subtree.clone()) diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs index 44123385c8cc3..243a27b83b0df 100644 --- a/src/tools/rust-analyzer/crates/tt/src/lib.rs +++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs @@ -357,10 +357,10 @@ impl<'a, S: Copy> TokenTreesView<'a, S> { } pub fn try_into_subtree(self) -> Option> { - if let Some(TokenTree::Subtree(subtree)) = self.0.first() { - if subtree.usize_len() == (self.0.len() - 1) { - return Some(SubtreeView::new(self.0)); - } + if let Some(TokenTree::Subtree(subtree)) = self.0.first() + && subtree.usize_len() == (self.0.len() - 1) + { + return Some(SubtreeView::new(self.0)); } None } @@ -1028,10 +1028,10 @@ pub fn pretty(mut tkns: &[TokenTree]) -> String { tkns = rest; last = [last, tokentree_to_text(tkn, &mut tkns)].join(if last_to_joint { "" } else { " " }); last_to_joint = false; - if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn { - if punct.spacing == Spacing::Joint { - last_to_joint = true; - } + if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn + && punct.spacing == Spacing::Joint + { + last_to_joint = true; } } last diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs index a03337dbc51ea..c6393cc6922a2 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs @@ -194,52 +194,49 @@ impl NotifyActor { } }, Event::NotifyEvent(event) => { - if let Some(event) = log_notify_error(event) { - if let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) = + if let Some(event) = log_notify_error(event) + && let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) = event.kind - { - let files = event - .paths - .into_iter() - .filter_map(|path| { - Some( - AbsPathBuf::try_from( - Utf8PathBuf::from_path_buf(path).ok()?, - ) + { + let files = event + .paths + .into_iter() + .filter_map(|path| { + Some( + AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?) 
.expect("path is absolute"), - ) - }) - .filter_map(|path| -> Option<(AbsPathBuf, Option>)> { - let meta = fs::metadata(&path).ok()?; - if meta.file_type().is_dir() - && self - .watched_dir_entries - .iter() - .any(|dir| dir.contains_dir(&path)) - { - self.watch(path.as_ref()); - return None; - } - - if !meta.file_type().is_file() { - return None; - } - - if !(self.watched_file_entries.contains(&path) - || self - .watched_dir_entries - .iter() - .any(|dir| dir.contains_file(&path))) - { - return None; - } - - let contents = read(&path); - Some((path, contents)) - }) - .collect(); - self.send(loader::Message::Changed { files }); - } + ) + }) + .filter_map(|path| -> Option<(AbsPathBuf, Option>)> { + let meta = fs::metadata(&path).ok()?; + if meta.file_type().is_dir() + && self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_dir(&path)) + { + self.watch(path.as_ref()); + return None; + } + + if !meta.file_type().is_file() { + return None; + } + + if !(self.watched_file_entries.contains(&path) + || self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_file(&path))) + { + return None; + } + + let contents = read(&path); + Some((path, contents)) + }) + .collect(); + self.send(loader::Message::Changed { files }); } } } diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md index ebac26e1d60a5..99a30d8f62138 100644 --- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md +++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md @@ -2,8 +2,7 @@ Default: `false` -Whether to insert #[must_use] when generating `as_` methods -for enum variants. +Insert #[must_use] when generating `as_` methods for enum variants. ## rust-analyzer.assist.expressionFillDefault {#assist.expressionFillDefault} @@ -17,14 +16,15 @@ Placeholder expression to use for missing expressions in assists. Default: `false` -When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible. +Prefer to use `Self` over the type name when inserting a type (e.g. in "fill match arms" assist). ## rust-analyzer.assist.termSearch.borrowcheck {#assist.termSearch.borrowcheck} Default: `true` -Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check. +Enable borrow checking for term search code assists. If set to false, also there will be +more suggestions, but some of them may not borrow-check. ## rust-analyzer.assist.termSearch.fuel {#assist.termSearch.fuel} @@ -45,7 +45,8 @@ Warm up caches on project load. Default: `"physical"` -How many worker threads to handle priming caches. The default `0` means to pick automatically. +How many worker threads to handle priming caches. The default `0` means to pick +automatically. ## rust-analyzer.cargo.allTargets {#cargo.allTargets} @@ -103,7 +104,9 @@ targets and features, with the following base command line: ```bash cargo check --quiet --workspace --message-format=json --all-targets --keep-going ``` -. + +Note: The option must be specified as an array of command line arguments, with +the first argument being the name of the command to run. ## rust-analyzer.cargo.buildScripts.rebuildOnSave {#cargo.buildScripts.rebuildOnSave} @@ -330,7 +333,9 @@ An example command would be: ```bash cargo check --workspace --message-format=json --all-targets ``` -. 
+ +Note: The option must be specified as an array of command line arguments, with +the first argument being the name of the command to run. ## rust-analyzer.check.targets {#check.targets} @@ -358,7 +363,7 @@ check will be performed. Default: `true` -Whether to automatically add a semicolon when completing unit-returning functions. +Automatically add a semicolon when completing unit-returning functions. In `match` arms it completes a comma instead. @@ -367,22 +372,26 @@ In `match` arms it completes a comma instead. Default: `true` -Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future. +Show method calls and field accesses completions with `await` prefixed to them when +completing on a future. ## rust-analyzer.completion.autoIter.enable {#completion.autoIter.enable} Default: `true` -Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them. +Show method call completions with `iter()` or `into_iter()` prefixed to them when +completing on a type that has them. ## rust-analyzer.completion.autoimport.enable {#completion.autoimport.enable} Default: `true` -Toggles the additional completions that automatically add imports when completed. -Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. +Show completions that automatically add imports when completed. + +Note that your client must specify the `additionalTextEdits` LSP client capability to +truly have this feature enabled. ## rust-analyzer.completion.autoimport.exclude {#completion.autoimport.exclude} @@ -406,10 +415,11 @@ A list of full paths to items to exclude from auto-importing completions. Traits in this list won't have their methods suggested in completions unless the trait is in scope. -You can either specify a string path which defaults to type "always" or use the more verbose -form `{ "path": "path::to::item", type: "always" }`. +You can either specify a string path which defaults to type "always" or use the more +verbose form `{ "path": "path::to::item", type: "always" }`. -For traits the type "methods" can be used to only exclude the methods but not the trait itself. +For traits the type "methods" can be used to only exclude the methods but not the trait +itself. This setting also inherits `#rust-analyzer.completion.excludeTraits#`. @@ -418,15 +428,15 @@ This setting also inherits `#rust-analyzer.completion.excludeTraits#`. Default: `true` -Toggles the additional completions that automatically show method calls and field accesses -with `self` prefixed to them when inside a method. +Show method calls and field access completions with `self` prefixed to them when +inside a method. ## rust-analyzer.completion.callable.snippets {#completion.callable.snippets} Default: `"fill_arguments"` -Whether to add parenthesis and argument snippets when completing function. +Add parenthesis and argument snippets when completing function. ## rust-analyzer.completion.excludeTraits {#completion.excludeTraits} @@ -435,7 +445,9 @@ Default: `[]` A list of full paths to traits whose methods to exclude from completion. -Methods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`. +Methods from these traits won't be completed, even if the trait is in scope. 
However, +they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or +`T where T: Trait`. Note that the trait themselves can still be completed. @@ -444,14 +456,15 @@ Note that the trait themselves can still be completed. Default: `false` -Whether to show full function/method signatures in completion docs. +Show full function / method signatures in completion docs. ## rust-analyzer.completion.hideDeprecated {#completion.hideDeprecated} Default: `false` -Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden. +Omit deprecated items from completions. By default they are marked as deprecated but not +hidden. ## rust-analyzer.completion.limit {#completion.limit} @@ -465,14 +478,15 @@ Maximum number of completions to return. If `None`, the limit is infinite. Default: `true` -Whether to show postfix snippets like `dbg`, `if`, `not`, etc. +Show postfix snippets like `dbg`, `if`, `not`, etc. ## rust-analyzer.completion.privateEditable.enable {#completion.privateEditable.enable} Default: `false` -Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. +Show completions of private items and fields that are defined in the current workspace +even if they are not visible at the current position. ## rust-analyzer.completion.snippets.custom {#completion.snippets.custom} @@ -529,7 +543,7 @@ Custom completion snippets. Default: `false` -Whether to enable term search based snippets like `Some(foo.bar().baz())`. +Enable term search based snippets like `Some(foo.bar().baz())`. ## rust-analyzer.completion.termSearch.fuel {#completion.termSearch.fuel} @@ -550,30 +564,30 @@ List of rust-analyzer diagnostics to disable. Default: `true` -Whether to show native rust-analyzer diagnostics. +Show native rust-analyzer diagnostics. ## rust-analyzer.diagnostics.experimental.enable {#diagnostics.experimental.enable} Default: `false` -Whether to show experimental rust-analyzer diagnostics that might -have more false positives than usual. +Show experimental rust-analyzer diagnostics that might have more false positives than +usual. ## rust-analyzer.diagnostics.remapPrefix {#diagnostics.remapPrefix} Default: `{}` -Map of prefixes to be substituted when parsing diagnostic file paths. -This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. +Map of prefixes to be substituted when parsing diagnostic file paths. This should be the +reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. ## rust-analyzer.diagnostics.styleLints.enable {#diagnostics.styleLints.enable} Default: `false` -Whether to run additional style lints. +Run additional style lints. ## rust-analyzer.diagnostics.warningsAsHint {#diagnostics.warningsAsHint} @@ -582,8 +596,8 @@ Default: `[]` List of warnings that should be displayed with hint severity. -The warnings will be indicated by faded text or three dots in code -and will not show up in the `Problems Panel`. +The warnings will be indicated by faded text or three dots in code and will not show up +in the `Problems Panel`. ## rust-analyzer.diagnostics.warningsAsInfo {#diagnostics.warningsAsInfo} @@ -592,17 +606,19 @@ Default: `[]` List of warnings that should be displayed with info severity. -The warnings will be indicated by a blue squiggly underline in code -and a blue icon in the `Problems Panel`. 
+The warnings will be indicated by a blue squiggly underline in code and a blue icon in +the `Problems Panel`. ## rust-analyzer.files.exclude {#files.exclude} Default: `[]` -These paths (file/directories) will be ignored by rust-analyzer. They are -relative to the workspace root, and globs are not supported. You may -also need to add the folders to Code's `files.watcherExclude`. +List of files to ignore + +These paths (file/directories) will be ignored by rust-analyzer. They are relative to +the workspace root, and globs are not supported. You may also need to add the folders to +Code's `files.watcherExclude`. ## rust-analyzer.files.watcher {#files.watcher} @@ -616,64 +632,67 @@ Controls file watching implementation. Default: `true` -Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`). +Highlight related return values while the cursor is on any `match`, `if`, or match arm +arrow (`=>`). ## rust-analyzer.highlightRelated.breakPoints.enable {#highlightRelated.breakPoints.enable} Default: `true` -Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords. +Highlight related references while the cursor is on `break`, `loop`, `while`, or `for` +keywords. ## rust-analyzer.highlightRelated.closureCaptures.enable {#highlightRelated.closureCaptures.enable} Default: `true` -Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure. +Highlight all captures of a closure while the cursor is on the `|` or move keyword of a closure. ## rust-analyzer.highlightRelated.exitPoints.enable {#highlightRelated.exitPoints.enable} Default: `true` -Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`). +Highlight all exit points while the cursor is on any `return`, `?`, `fn`, or return type +arrow (`->`). ## rust-analyzer.highlightRelated.references.enable {#highlightRelated.references.enable} Default: `true` -Enables highlighting of related references while the cursor is on any identifier. +Highlight related references while the cursor is on any identifier. ## rust-analyzer.highlightRelated.yieldPoints.enable {#highlightRelated.yieldPoints.enable} Default: `true` -Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords. +Highlight all break points for a loop or block context while the cursor is on any +`async` or `await` keywords. ## rust-analyzer.hover.actions.debug.enable {#hover.actions.debug.enable} Default: `true` -Whether to show `Debug` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `Debug` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. ## rust-analyzer.hover.actions.enable {#hover.actions.enable} Default: `true` -Whether to show HoverActions in Rust files. +Show HoverActions in Rust files. ## rust-analyzer.hover.actions.gotoTypeDef.enable {#hover.actions.gotoTypeDef.enable} Default: `true` -Whether to show `Go to Type Definition` action. Only applies when +Show `Go to Type Definition` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. @@ -681,46 +700,45 @@ Whether to show `Go to Type Definition` action. Only applies when Default: `true` -Whether to show `Implementations` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `Implementations` action. 
Only applies when `#rust-analyzer.hover.actions.enable#` +is set. ## rust-analyzer.hover.actions.references.enable {#hover.actions.references.enable} Default: `false` -Whether to show `References` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `References` action. Only applies when `#rust-analyzer.hover.actions.enable#` is +set. ## rust-analyzer.hover.actions.run.enable {#hover.actions.run.enable} Default: `true` -Whether to show `Run` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `Run` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. ## rust-analyzer.hover.actions.updateTest.enable {#hover.actions.updateTest.enable} Default: `true` -Whether to show `Update Test` action. Only applies when -`#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set. +Show `Update Test` action. Only applies when `#rust-analyzer.hover.actions.enable#` and +`#rust-analyzer.hover.actions.run.enable#` are set. ## rust-analyzer.hover.documentation.enable {#hover.documentation.enable} Default: `true` -Whether to show documentation on hover. +Show documentation on hover. ## rust-analyzer.hover.documentation.keywords.enable {#hover.documentation.keywords.enable} Default: `true` -Whether to show keyword hover popups. Only applies when +Show keyword hover popups. Only applies when `#rust-analyzer.hover.documentation.enable#` is set. @@ -728,7 +746,7 @@ Whether to show keyword hover popups. Only applies when Default: `true` -Whether to show drop glue information on hover. +Show drop glue information on hover. ## rust-analyzer.hover.links.enable {#hover.links.enable} @@ -742,9 +760,11 @@ Use markdown syntax for links on hover. Default: `20` -Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis. +Show what types are used as generic arguments in calls etc. on hover, and limit the max +length to show such types, beyond which they will be shown with ellipsis. -This can take three values: `null` means "unlimited", the string `"hide"` means to not show generic substitutions at all, and a number means to limit them to X characters. +This can take three values: `null` means "unlimited", the string `"hide"` means to not +show generic substitutions at all, and a number means to limit them to X characters. The default is 20 characters. @@ -760,7 +780,7 @@ How to render the align information in a memory layout hover. Default: `true` -Whether to show memory layout data on hover. +Show memory layout data on hover. ## rust-analyzer.hover.memoryLayout.niches {#hover.memoryLayout.niches} @@ -802,7 +822,8 @@ How many variants of an enum to display when hovering on. Show none if empty. Default: `5` -How many fields of a struct, variant or union to display when hovering on. Show none if empty. +How many fields of a struct, variant or union to display when hovering on. Show none if +empty. ## rust-analyzer.hover.show.traitAssocItems {#hover.show.traitAssocItems} @@ -816,7 +837,8 @@ How many associated items of a trait to display when hovering a trait. Default: `false` -Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. +Enforce the import granularity setting for all files. If set to false rust-analyzer will +try to keep import styles consistent per file. 
## rust-analyzer.imports.granularity.group {#imports.granularity.group} @@ -830,14 +852,17 @@ How imports should be grouped into use statements. Default: `true` -Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines. +Group inserted imports by the [following +order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are +separated by newlines. ## rust-analyzer.imports.merge.glob {#imports.merge.glob} Default: `true` -Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. +Allow import insertion to merge new imports into single path glob imports like `use +std::fmt::*;`. ## rust-analyzer.imports.preferNoStd {#imports.preferNoStd} @@ -851,7 +876,7 @@ Prefer to unconditionally use imports of the core and alloc crate, over the std Default: `false` -Whether to prefer import paths containing a `prelude` module. +Prefer import paths containing a `prelude` module. ## rust-analyzer.imports.prefix {#imports.prefix} @@ -865,28 +890,30 @@ The path structure for newly inserted paths to use. Default: `false` -Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;". +Prefix external (including std, core) crate imports with `::`. + +E.g. `use ::std::io::Read;`. ## rust-analyzer.inlayHints.bindingModeHints.enable {#inlayHints.bindingModeHints.enable} Default: `false` -Whether to show inlay type hints for binding modes. +Show inlay type hints for binding modes. ## rust-analyzer.inlayHints.chainingHints.enable {#inlayHints.chainingHints.enable} Default: `true` -Whether to show inlay type hints for method chains. +Show inlay type hints for method chains. ## rust-analyzer.inlayHints.closingBraceHints.enable {#inlayHints.closingBraceHints.enable} Default: `true` -Whether to show inlay hints after a closing `}` to indicate what item it belongs to. +Show inlay hints after a closing `}` to indicate what item it belongs to. ## rust-analyzer.inlayHints.closingBraceHints.minLines {#inlayHints.closingBraceHints.minLines} @@ -901,14 +928,14 @@ to always show them). Default: `false` -Whether to show inlay hints for closure captures. +Show inlay hints for closure captures. ## rust-analyzer.inlayHints.closureReturnTypeHints.enable {#inlayHints.closureReturnTypeHints.enable} Default: `"never"` -Whether to show inlay type hints for return types of closures. +Show inlay type hints for return types of closures. ## rust-analyzer.inlayHints.closureStyle {#inlayHints.closureStyle} @@ -922,77 +949,77 @@ Closure notation in type and chaining inlay hints. Default: `"never"` -Whether to show enum variant discriminant hints. +Show enum variant discriminant hints. ## rust-analyzer.inlayHints.expressionAdjustmentHints.enable {#inlayHints.expressionAdjustmentHints.enable} Default: `"never"` -Whether to show inlay hints for type adjustments. +Show inlay hints for type adjustments. ## rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe {#inlayHints.expressionAdjustmentHints.hideOutsideUnsafe} Default: `false` -Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. +Hide inlay hints for type adjustments outside of `unsafe` blocks. ## rust-analyzer.inlayHints.expressionAdjustmentHints.mode {#inlayHints.expressionAdjustmentHints.mode} Default: `"prefix"` -Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). +Show inlay hints as postfix ops (`.*` instead of `*`, etc). 
## rust-analyzer.inlayHints.genericParameterHints.const.enable {#inlayHints.genericParameterHints.const.enable} Default: `true` -Whether to show const generic parameter name inlay hints. +Show const generic parameter name inlay hints. ## rust-analyzer.inlayHints.genericParameterHints.lifetime.enable {#inlayHints.genericParameterHints.lifetime.enable} Default: `false` -Whether to show generic lifetime parameter name inlay hints. +Show generic lifetime parameter name inlay hints. ## rust-analyzer.inlayHints.genericParameterHints.type.enable {#inlayHints.genericParameterHints.type.enable} Default: `false` -Whether to show generic type parameter name inlay hints. +Show generic type parameter name inlay hints. ## rust-analyzer.inlayHints.implicitDrops.enable {#inlayHints.implicitDrops.enable} Default: `false` -Whether to show implicit drop hints. +Show implicit drop hints. ## rust-analyzer.inlayHints.implicitSizedBoundHints.enable {#inlayHints.implicitSizedBoundHints.enable} Default: `false` -Whether to show inlay hints for the implied type parameter `Sized` bound. +Show inlay hints for the implied type parameter `Sized` bound. ## rust-analyzer.inlayHints.lifetimeElisionHints.enable {#inlayHints.lifetimeElisionHints.enable} Default: `"never"` -Whether to show inlay type hints for elided lifetimes in function signatures. +Show inlay type hints for elided lifetimes in function signatures. ## rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames {#inlayHints.lifetimeElisionHints.useParameterNames} Default: `false` -Whether to prefer using parameter names as the name for elided lifetime hints if possible. +Prefer using parameter names as the name for elided lifetime hints if possible. ## rust-analyzer.inlayHints.maxLength {#inlayHints.maxLength} @@ -1006,23 +1033,24 @@ Maximum length for inlay hints. Set to null to have an unlimited length. Default: `true` -Whether to show function parameter name inlay hints at the call -site. +Show function parameter name inlay hints at the call site. ## rust-analyzer.inlayHints.rangeExclusiveHints.enable {#inlayHints.rangeExclusiveHints.enable} Default: `false` -Whether to show exclusive range inlay hints. +Show exclusive range inlay hints. ## rust-analyzer.inlayHints.reborrowHints.enable {#inlayHints.reborrowHints.enable} Default: `"never"` -Whether to show inlay hints for compiler inserted reborrows. -This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. +Show inlay hints for compiler inserted reborrows. + +This setting is deprecated in favor of +#rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. ## rust-analyzer.inlayHints.renderColons {#inlayHints.renderColons} @@ -1036,36 +1064,38 @@ Whether to render leading colons for type hints, and trailing colons for paramet Default: `true` -Whether to show inlay type hints for variables. +Show inlay type hints for variables. ## rust-analyzer.inlayHints.typeHints.hideClosureInitialization {#inlayHints.typeHints.hideClosureInitialization} Default: `false` -Whether to hide inlay type hints for `let` statements that initialize to a closure. -Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. +Hide inlay type hints for `let` statements that initialize to a closure. + +Only applies to closures with blocks, same as +`#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. 
## rust-analyzer.inlayHints.typeHints.hideClosureParameter {#inlayHints.typeHints.hideClosureParameter} Default: `false` -Whether to hide inlay parameter type hints for closures. +Hide inlay parameter type hints for closures. ## rust-analyzer.inlayHints.typeHints.hideNamedConstructor {#inlayHints.typeHints.hideNamedConstructor} Default: `false` -Whether to hide inlay type hints for constructors. +Hide inlay type hints for constructors. ## rust-analyzer.interpret.tests {#interpret.tests} Default: `false` -Enables the experimental support for interpreting tests. +Enable the experimental support for interpreting tests. ## rust-analyzer.joinLines.joinAssignments {#joinLines.joinAssignments} @@ -1100,23 +1130,21 @@ Join lines unwraps trivial blocks. Default: `true` -Whether to show `Debug` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.enable {#lens.enable} Default: `true` -Whether to show CodeLens in Rust files. +Show CodeLens in Rust files. ## rust-analyzer.lens.implementations.enable {#lens.implementations.enable} Default: `true` -Whether to show `Implementations` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.location {#lens.location} @@ -1130,60 +1158,56 @@ Where to render annotations. Default: `false` -Whether to show `References` lens for Struct, Enum, and Union. -Only applies when `#rust-analyzer.lens.enable#` is set. +Show `References` lens for Struct, Enum, and Union. Only applies when +`#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.references.enumVariant.enable {#lens.references.enumVariant.enable} Default: `false` -Whether to show `References` lens for Enum Variants. -Only applies when `#rust-analyzer.lens.enable#` is set. +Show `References` lens for Enum Variants. Only applies when +`#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.references.method.enable {#lens.references.method.enable} Default: `false` -Whether to show `Method References` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.references.trait.enable {#lens.references.trait.enable} Default: `false` -Whether to show `References` lens for Trait. -Only applies when `#rust-analyzer.lens.enable#` is set. +Show `References` lens for Trait. Only applies when `#rust-analyzer.lens.enable#` is +set. ## rust-analyzer.lens.run.enable {#lens.run.enable} Default: `true` -Whether to show `Run` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.updateTest.enable {#lens.updateTest.enable} Default: `true` -Whether to show `Update Test` lens. Only applies when -`#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set. +Show `Update Test` lens. Only applies when `#rust-analyzer.lens.enable#` and +`#rust-analyzer.lens.run.enable#` are set. ## rust-analyzer.linkedProjects {#linkedProjects} Default: `[]` -Disable project auto-discovery in favor of explicitly specified set -of projects. +Disable project auto-discovery in favor of explicitly specified set of projects. -Elements must be paths pointing to `Cargo.toml`, -`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON -objects in `rust-project.json` format. 
+Elements must be paths pointing to `Cargo.toml`, `rust-project.json`, `.rs` files (which +will be treated as standalone files) or JSON objects in `rust-project.json` format. ## rust-analyzer.lru.capacity {#lru.capacity} @@ -1197,21 +1221,22 @@ Number of syntax trees rust-analyzer keeps in memory. Defaults to 128. Default: `{}` -Sets the LRU capacity of the specified queries. +The LRU capacity of the specified queries. ## rust-analyzer.notifications.cargoTomlNotFound {#notifications.cargoTomlNotFound} Default: `true` -Whether to show `can't find Cargo.toml` error message. +Show `can't find Cargo.toml` error message. ## rust-analyzer.numThreads {#numThreads} Default: `null` -How many worker threads in the main loop. The default `null` means to pick automatically. +The number of worker threads in the main loop. The default `null` means to pick +automatically. ## rust-analyzer.procMacro.attributes.enable {#procMacro.attributes.enable} @@ -1322,6 +1347,9 @@ not that of `cargo fmt`. The file contents will be passed on the standard input and the formatted result will be read from the standard output. +Note: The option must be specified as an array of command line arguments, with +the first argument being the name of the command to run. + ## rust-analyzer.rustfmt.rangeFormatting.enable {#rustfmt.rangeFormatting.enable} @@ -1346,7 +1374,10 @@ doc links. Default: `true` -Whether the server is allowed to emit non-standard tokens and modifiers. +Emit non-standard tokens and modifiers + +When enabled, rust-analyzer will emit tokens and modifiers that are not part of the +standard set of semantic tokens. ## rust-analyzer.semanticHighlighting.operator.enable {#semanticHighlighting.operator.enable} @@ -1427,11 +1458,15 @@ Show documentation. Default: `"=."` Specify the characters allowed to invoke special on typing triggers. -- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression + +- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing + expression - typing `=` between two expressions adds `;` when in statement position -- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position +- typing `=` to turn an assignment into an equality comparison removes `;` when in + expression position - typing `.` in a chain method call auto-indents -- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression +- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the + expression - typing `{` in a use item adds a closing `}` in the right place - typing `>` to complete a return type `->` will insert a whitespace after it - typing `<` in a path or type position inserts a closing `>` after the path or type. @@ -1475,8 +1510,8 @@ Below is an example of a valid configuration: **Warning**: This format is provisional and subject to change. -[`DiscoverWorkspaceConfig::command`] *must* return a JSON object -corresponding to `DiscoverProjectData::Finished`: +[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to +`DiscoverProjectData::Finished`: ```norun #[derive(Debug, Clone, Deserialize, Serialize)] @@ -1506,12 +1541,11 @@ As JSON, `DiscoverProjectData::Finished` is: } ``` -It is encouraged, but not required, to use the other variants on -`DiscoverProjectData` to provide a more polished end-user experience. 
+It is encouraged, but not required, to use the other variants on `DiscoverProjectData` +to provide a more polished end-user experience. -`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, -which will be substituted with the JSON-serialized form of the following -enum: +`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be +substituted with the JSON-serialized form of the following enum: ```norun #[derive(PartialEq, Clone, Debug, Serialize)] @@ -1538,11 +1572,10 @@ Similarly, the JSON representation of `DiscoverArgument::Buildfile` is: } ``` -`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, -and therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to -to update an existing workspace. As a reference for implementors, -buck2's `rust-project` will likely be useful: -https://github.com/facebook/buck2/tree/main/integrations/rust-project. +`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and +therefore, a workspace, whereas `DiscoverArgument::Buildfile` is used to update an +existing workspace. As a reference for implementors, buck2's `rust-project` will likely +be useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project. ## rust-analyzer.workspace.symbol.search.excludeImports {#workspace.symbol.search.excludeImports} diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/style.md b/src/tools/rust-analyzer/docs/book/src/contributing/style.md index 5654e37753a57..746f3eb132117 --- a/src/tools/rust-analyzer/docs/book/src/contributing/style.md +++ b/src/tools/rust-analyzer/docs/book/src/contributing/style.md @@ -49,8 +49,8 @@ In this case, we'll probably ask you to split API changes into a separate PR. Changes of the third group should be pretty rare, so we don't specify any specific process for them. That said, adding an innocent-looking `pub use` is a very simple way to break encapsulation, keep an eye on it! -Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate -https://www.tedinski.com/2018/02/06/system-boundaries.html +Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate [this post](https://www.tedinski.com/2018/02/06/system-boundaries.html). + ## Crates.io Dependencies @@ -231,7 +231,7 @@ fn is_string_literal(s: &str) -> bool { } ``` -In the "Not as good" version, the precondition that `1` is a valid char boundary is checked in `is_string_literal` and used in `foo`. +In the "Bad" version, the precondition that `1` and `s.len() - 1` are valid string literal boundaries is checked in `is_string_literal` but used in `main`. In the "Good" version, the precondition check and usage are checked in the same block, and then encoded in the types. **Rationale:** non-local code properties degrade under change. @@ -271,6 +271,8 @@ fn f() { } ``` +See also [this post](https://matklad.github.io/2023/11/15/push-ifs-up-and-fors-down.html). + ## Assertions Assert liberally. @@ -608,7 +610,7 @@ Avoid making a lot of code type parametric, *especially* on the boundaries betwe ```rust // GOOD -fn frobnicate(f: impl FnMut()) { +fn frobnicate(mut f: impl FnMut()) { frobnicate_impl(&mut f) } fn frobnicate_impl(f: &mut dyn FnMut()) { @@ -616,7 +618,7 @@ fn frobnicate_impl(f: &mut dyn FnMut()) { } // BAD -fn frobnicate(f: impl FnMut()) { +fn frobnicate(mut f: impl FnMut()) { // lots of code } ``` @@ -975,7 +977,7 @@ Don't use the `ref` keyword. **Rationale:** consistency & simplicity.
`ref` was required before [match ergonomics](https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md). Today, it is redundant. -Between `ref` and mach ergonomics, the latter is more ergonomic in most cases, and is simpler (does not require a keyword). +Between `ref` and match ergonomics, the latter is more ergonomic in most cases, and is simpler (does not require a keyword). ## Empty Match Arms diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 3cb4c21ee1fb2..470db244f14bd 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -354,35 +354,122 @@ ], "configuration": [ { - "title": "general", + "title": "Rust Analyzer" + }, + { + "title": "Assist" + }, + { + "title": "Cache Priming" + }, + { + "title": "Cargo" + }, + { + "title": "Cfg" + }, + { + "title": "Check" + }, + { + "title": "Completion" + }, + { + "title": "Debug" + }, + { + "title": "Diagnostics" + }, + { + "title": "Files" + }, + { + "title": "Highlight Related" + }, + { + "title": "Hover" + }, + { + "title": "Imports" + }, + { + "title": "Inlay Hints" + }, + { + "title": "Interpret" + }, + { + "title": "Join Lines" + }, + { + "title": "Lens" + }, + { + "title": "Lru" + }, + { + "title": "Notifications" + }, + { + "title": "Proc Macro" + }, + { + "title": "References" + }, + { + "title": "Runnables" + }, + { + "title": "Rustc" + }, + { + "title": "Rustfmt" + }, + { + "title": "Semantic Highlighting" + }, + { + "title": "Signature Info" + }, + { + "title": "Typing" + }, + { + "title": "Vfs" + }, + { + "title": "Workspace" + }, + { + "title": "rust-analyzer", "properties": { "rust-analyzer.restartServerOnConfigChange": { - "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.", + "description": "Restart the server automatically when settings that require a restart are changed.", "default": false, "type": "boolean" }, "rust-analyzer.showUnlinkedFileNotification": { - "markdownDescription": "Whether to show a notification for unlinked files asking the user to add the corresponding Cargo.toml to the linked projects setting.", + "description": "Show a notification for unlinked files, prompting the user to add the corresponding Cargo.toml to the linked projects setting.", "default": true, "type": "boolean" }, "rust-analyzer.showRequestFailedErrorNotification": { - "markdownDescription": "Whether to show error notifications for failing requests.", + "description": "Show error notifications when requests fail.", "default": true, "type": "boolean" }, "rust-analyzer.showDependenciesExplorer": { - "markdownDescription": "Whether to show the dependencies view.", + "description": "Show Rust Dependencies in the Explorer view.", "default": true, "type": "boolean" }, "rust-analyzer.showSyntaxTree": { - "markdownDescription": "Whether to show the syntax tree view.", + "description": "Show Syntax Tree in the Explorer view.", "default": false, "type": "boolean" }, "rust-analyzer.testExplorer": { - "markdownDescription": "Whether to show the test explorer.", + "description": "Show the Test Explorer view.", "default": false, "type": "boolean" }, @@ -394,7 +481,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.extraEnv": { "anyOf": [ @@ -452,7 +539,7 @@ } }, { - "title": "statusBar", + "title": "Status Bar", "properties": { "rust-analyzer.statusBar.clickAction": { "type": 
"string", @@ -524,7 +611,7 @@ } }, { - "title": "server", + "title": "Server", "properties": { "rust-analyzer.server.path": { "type": [ @@ -553,7 +640,7 @@ } }, { - "title": "trace", + "title": "Trace", "properties": { "rust-analyzer.trace.server": { "type": "string", @@ -580,7 +667,7 @@ } }, { - "title": "debug", + "title": "Debug", "properties": { "rust-analyzer.debug.engine": { "type": "string", @@ -625,7 +712,7 @@ } }, { - "title": "typing", + "title": "Typing", "properties": { "rust-analyzer.typing.continueCommentsOnNewline": { "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.", @@ -635,7 +722,7 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.previewRustcOutput": { "markdownDescription": "Whether to show the main part of the rendered rustc output of a diagnostic message.", @@ -653,17 +740,17 @@ "title": "$generated-start" }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.emitMustUse": { - "markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.", + "markdownDescription": "Insert #[must_use] when generating `as_` methods for enum variants.", "default": false, "type": "boolean" } } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.expressionFillDefault": { "markdownDescription": "Placeholder expression to use for missing expressions in assists.", @@ -681,27 +768,27 @@ } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.preferSelf": { - "markdownDescription": "When inserting a type (e.g. in \"fill match arms\" assist), prefer to use `Self` over the type name where possible.", + "markdownDescription": "Prefer to use `Self` over the type name when inserting a type (e.g. in \"fill match arms\" assist).", "default": false, "type": "boolean" } } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.termSearch.borrowcheck": { - "markdownDescription": "Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.", + "markdownDescription": "Enable borrow checking for term search code assists. If set to false, also there will be\nmore suggestions, but some of them may not borrow-check.", "default": true, "type": "boolean" } } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.termSearch.fuel": { "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 1800).", @@ -712,7 +799,7 @@ } }, { - "title": "cachePriming", + "title": "Cache Priming", "properties": { "rust-analyzer.cachePriming.enable": { "markdownDescription": "Warm up caches on project load.", @@ -722,10 +809,10 @@ } }, { - "title": "cachePriming", + "title": "Cache Priming", "properties": { "rust-analyzer.cachePriming.numThreads": { - "markdownDescription": "How many worker threads to handle priming caches. The default `0` means to pick automatically.", + "markdownDescription": "How many worker threads to handle priming caches. 
The default `0` means to pick\nautomatically.", "default": "physical", "anyOf": [ { @@ -749,7 +836,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.allTargets": { "markdownDescription": "Pass `--all-targets` to cargo invocation.", @@ -759,7 +846,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.autoreload": { "markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.", @@ -769,7 +856,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.enable": { "markdownDescription": "Run build scripts (`build.rs`) for more precise code analysis.", @@ -779,7 +866,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.invocationStrategy": { "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each Rust workspace with the\nworkspace as the working directory.\nIf `once` is set, the command will be executed once with the opened project as the\nworking directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", @@ -797,10 +884,10 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). 
This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", "default": null, "type": [ "null", @@ -813,7 +900,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.rebuildOnSave": { "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.", @@ -823,7 +910,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.useRustcWrapper": { "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.", @@ -833,7 +920,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.cfgs": { "markdownDescription": "List of cfg options to enable with the given values.\n\nTo enable a name without a value, use `\"key\"`.\nTo enable a name with a value, use `\"key=value\"`.\nTo disable, prefix the entry with a `!`.", @@ -849,7 +936,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.extraArgs": { "markdownDescription": "Extra arguments that are passed to every cargo invocation.", @@ -862,7 +949,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.extraEnv": { "markdownDescription": "Extra environment variables that will be set when running cargo, rustc\nor other commands within the workspace. Useful for setting RUSTFLAGS.", @@ -872,7 +959,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.features": { "markdownDescription": "List of features to activate.\n\nSet this to `\"all\"` to pass `--all-features` to cargo.", @@ -898,7 +985,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.noDefaultFeatures": { "markdownDescription": "Whether to pass `--no-default-features` to cargo.", @@ -908,7 +995,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.noDeps": { "markdownDescription": "Whether to skip fetching dependencies. If set to \"true\", the analysis is performed\nentirely offline, and Cargo metadata for dependencies is not fetched.", @@ -918,7 +1005,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.sysroot": { "markdownDescription": "Relative path to the sysroot, or \"discover\" to try to automatically find it via\n\"rustc --print sysroot\".\n\nUnsetting this disables sysroot loading.\n\nThis option does not take effect until rust-analyzer is restarted.", @@ -931,7 +1018,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.sysrootSrc": { "markdownDescription": "Relative path to the sysroot library sources. 
If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.", @@ -944,7 +1031,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.target": { "markdownDescription": "Compilation target override (target tuple).", @@ -957,7 +1044,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.targetDir": { "markdownDescription": "Optional path to a rust-analyzer specific target directory.\nThis prevents rust-analyzer's `cargo check` and initial build-script and proc-macro\nbuilding from locking the `Cargo.lock` at the expense of duplicating build artifacts.\n\nSet to `true` to use a subdirectory of the existing target directory or\nset to a path relative to the workspace to use that path.", @@ -977,7 +1064,7 @@ } }, { - "title": "cfg", + "title": "Cfg", "properties": { "rust-analyzer.cfg.setTest": { "markdownDescription": "Set `cfg(test)` for local crates. Defaults to true.", @@ -987,7 +1074,7 @@ } }, { - "title": "general", + "title": "rust-analyzer", "properties": { "rust-analyzer.checkOnSave": { "markdownDescription": "Run the check command for diagnostics on save.", @@ -997,7 +1084,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.allTargets": { "markdownDescription": "Check all targets and tests (`--all-targets`). Defaults to\n`#rust-analyzer.cargo.allTargets#`.", @@ -1010,7 +1097,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.command": { "markdownDescription": "Cargo command to use for `cargo check`.", @@ -1020,7 +1107,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.extraArgs": { "markdownDescription": "Extra arguments for `cargo check`.", @@ -1033,7 +1120,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.extraEnv": { "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.", @@ -1043,7 +1130,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.features": { "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.", @@ -1072,7 +1159,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.ignore": { "markdownDescription": "List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.\n\nFor example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...", @@ -1086,7 +1173,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.invocationStrategy": { "markdownDescription": "Specifies the invocation strategy to use when running the check command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.", @@ -1104,7 +1191,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.noDefaultFeatures": { "markdownDescription": "Whether to pass `--no-default-features` to Cargo. 
Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", @@ -1117,10 +1204,10 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", "default": null, "type": [ "null", @@ -1133,7 +1220,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.targets": { "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", @@ -1156,7 +1243,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.workspace": { "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p ` will be passed instead if applicable. 
In case it is not, no\ncheck will be performed.", @@ -1166,50 +1253,50 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.addSemicolonToUnit": { - "markdownDescription": "Whether to automatically add a semicolon when completing unit-returning functions.\n\nIn `match` arms it completes a comma instead.", + "markdownDescription": "Automatically add a semicolon when completing unit-returning functions.\n\nIn `match` arms it completes a comma instead.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoAwait.enable": { - "markdownDescription": "Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future.", + "markdownDescription": "Show method calls and field accesses completions with `await` prefixed to them when\ncompleting on a future.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoIter.enable": { - "markdownDescription": "Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them.", + "markdownDescription": "Show method call completions with `iter()` or `into_iter()` prefixed to them when\ncompleting on a type that has them.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoimport.enable": { - "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.", + "markdownDescription": "Show completions that automatically add imports when completed.\n\nNote that your client must specify the `additionalTextEdits` LSP client capability to\ntruly have this feature enabled.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoimport.exclude": { - "markdownDescription": "A list of full paths to items to exclude from auto-importing completions.\n\nTraits in this list won't have their methods suggested in completions unless the trait\nis in scope.\n\nYou can either specify a string path which defaults to type \"always\" or use the more verbose\nform `{ \"path\": \"path::to::item\", type: \"always\" }`.\n\nFor traits the type \"methods\" can be used to only exclude the methods but not the trait itself.\n\nThis setting also inherits `#rust-analyzer.completion.excludeTraits#`.", + "markdownDescription": "A list of full paths to items to exclude from auto-importing completions.\n\nTraits in this list won't have their methods suggested in completions unless the trait\nis in scope.\n\nYou can either specify a string path which defaults to type \"always\" or use the more\nverbose form `{ \"path\": \"path::to::item\", type: \"always\" }`.\n\nFor traits the type \"methods\" can be used to only exclude the methods but not the trait\nitself.\n\nThis setting also inherits `#rust-analyzer.completion.excludeTraits#`.", "default": [ { "path": "core::borrow::Borrow", @@ -1251,20 +1338,20 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoself.enable": { - "markdownDescription": "Toggles the additional 
completions that automatically show method calls and field accesses\nwith `self` prefixed to them when inside a method.", + "markdownDescription": "Show method calls and field access completions with `self` prefixed to them when\ninside a method.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.callable.snippets": { - "markdownDescription": "Whether to add parenthesis and argument snippets when completing function.", + "markdownDescription": "Add parenthesis and argument snippets when completing function.", "default": "fill_arguments", "type": "string", "enum": [ @@ -1281,10 +1368,10 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.excludeTraits": { - "markdownDescription": "A list of full paths to traits whose methods to exclude from completion.\n\nMethods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`.\n\nNote that the trait themselves can still be completed.", + "markdownDescription": "A list of full paths to traits whose methods to exclude from completion.\n\nMethods from these traits won't be completed, even if the trait is in scope. However,\nthey will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or\n`T where T: Trait`.\n\nNote that the trait themselves can still be completed.", "default": [], "type": "array", "items": { @@ -1294,27 +1381,27 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.fullFunctionSignatures.enable": { - "markdownDescription": "Whether to show full function/method signatures in completion docs.", + "markdownDescription": "Show full function / method signatures in completion docs.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.hideDeprecated": { - "markdownDescription": "Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden.", + "markdownDescription": "Omit deprecated items from completions. By default they are marked as deprecated but not\nhidden.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.limit": { "markdownDescription": "Maximum number of completions to return. 
If `None`, the limit is infinite.", @@ -1328,27 +1415,27 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.postfix.enable": { - "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.", + "markdownDescription": "Show postfix snippets like `dbg`, `if`, `not`, etc.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.privateEditable.enable": { - "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.", + "markdownDescription": "Show completions of private items and fields that are defined in the current workspace\neven if they are not visible at the current position.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.snippets.custom": { "markdownDescription": "Custom completion snippets.", @@ -1398,17 +1485,17 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.termSearch.enable": { - "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.", + "markdownDescription": "Enable term search based snippets like `Some(foo.bar().baz())`.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.termSearch.fuel": { "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 1000).", @@ -1419,7 +1506,7 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.disabled": { "markdownDescription": "List of rust-analyzer diagnostics to disable.", @@ -1433,50 +1520,50 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.enable": { - "markdownDescription": "Whether to show native rust-analyzer diagnostics.", + "markdownDescription": "Show native rust-analyzer diagnostics.", "default": true, "type": "boolean" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.experimental.enable": { - "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might\nhave more false positives than usual.", + "markdownDescription": "Show experimental rust-analyzer diagnostics that might have more false positives than\nusual.", "default": false, "type": "boolean" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.remapPrefix": { - "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.", + "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths. 
This should be the\nreverse mapping of what is passed to `rustc` as `--remap-path-prefix`.", "default": {}, "type": "object" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.styleLints.enable": { - "markdownDescription": "Whether to run additional style lints.", + "markdownDescription": "Run additional style lints.", "default": false, "type": "boolean" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.warningsAsHint": { - "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.", + "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code and will not show up\nin the `Problems Panel`.", "default": [], "type": "array", "items": { @@ -1486,10 +1573,10 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.warningsAsInfo": { - "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.", + "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in\nthe `Problems Panel`.", "default": [], "type": "array", "items": { @@ -1499,10 +1586,10 @@ } }, { - "title": "files", + "title": "Files", "properties": { "rust-analyzer.files.exclude": { - "markdownDescription": "These paths (file/directories) will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.", + "markdownDescription": "List of files to ignore\n\nThese paths (file/directories) will be ignored by rust-analyzer. They are relative to\nthe workspace root, and globs are not supported. 
You may also need to add the folders to\nCode's `files.watcherExclude`.", "default": [], "type": "array", "items": { @@ -1512,7 +1599,7 @@ } }, { - "title": "files", + "title": "Files", "properties": { "rust-analyzer.files.watcher": { "markdownDescription": "Controls file watching implementation.", @@ -1530,167 +1617,167 @@ } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.branchExitPoints.enable": { - "markdownDescription": "Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).", + "markdownDescription": "Highlight related return values while the cursor is on any `match`, `if`, or match arm\narrow (`=>`).", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.breakPoints.enable": { - "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.", + "markdownDescription": "Highlight related references while the cursor is on `break`, `loop`, `while`, or `for`\nkeywords.", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.closureCaptures.enable": { - "markdownDescription": "Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.", + "markdownDescription": "Highlight all captures of a closure while the cursor is on the `|` or move keyword of a closure.", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.exitPoints.enable": { - "markdownDescription": "Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).", + "markdownDescription": "Highlight all exit points while the cursor is on any `return`, `?`, `fn`, or return type\narrow (`->`).", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.references.enable": { - "markdownDescription": "Enables highlighting of related references while the cursor is on any identifier.", + "markdownDescription": "Highlight related references while the cursor is on any identifier.", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.yieldPoints.enable": { - "markdownDescription": "Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.", + "markdownDescription": "Highlight all break points for a loop or block context while the cursor is on any\n`async` or `await` keywords.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.debug.enable": { - "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Debug` action. 
Only applies when `#rust-analyzer.hover.actions.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.enable": { - "markdownDescription": "Whether to show HoverActions in Rust files.", + "markdownDescription": "Show HoverActions in Rust files.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.gotoTypeDef.enable": { - "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.implementations.enable": { - "markdownDescription": "Whether to show `Implementations` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Implementations` action. Only applies when `#rust-analyzer.hover.actions.enable#`\nis set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.references.enable": { - "markdownDescription": "Whether to show `References` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `References` action. Only applies when `#rust-analyzer.hover.actions.enable#` is\nset.", "default": false, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.run.enable": { - "markdownDescription": "Whether to show `Run` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Run` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.updateTest.enable": { - "markdownDescription": "Whether to show `Update Test` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set.", + "markdownDescription": "Show `Update Test` action. Only applies when `#rust-analyzer.hover.actions.enable#` and\n`#rust-analyzer.hover.actions.run.enable#` are set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.documentation.enable": { - "markdownDescription": "Whether to show documentation on hover.", + "markdownDescription": "Show documentation on hover.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.documentation.keywords.enable": { - "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.", + "markdownDescription": "Show keyword hover popups. 
Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.dropGlue.enable": { - "markdownDescription": "Whether to show drop glue information on hover.", + "markdownDescription": "Show drop glue information on hover.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.links.enable": { "markdownDescription": "Use markdown syntax for links on hover.", @@ -1700,10 +1787,10 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.maxSubstitutionLength": { - "markdownDescription": "Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis.\n\nThis can take three values: `null` means \"unlimited\", the string `\"hide\"` means to not show generic substitutions at all, and a number means to limit them to X characters.\n\nThe default is 20 characters.", + "markdownDescription": "Show what types are used as generic arguments in calls etc. on hover, and limit the max\nlength to show such types, beyond which they will be shown with ellipsis.\n\nThis can take three values: `null` means \"unlimited\", the string `\"hide\"` means to not\nshow generic substitutions at all, and a number means to limit them to X characters.\n\nThe default is 20 characters.", "default": 20, "anyOf": [ { @@ -1723,7 +1810,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.alignment": { "markdownDescription": "How to render the align information in a memory layout hover.", @@ -1750,17 +1837,17 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.enable": { - "markdownDescription": "Whether to show memory layout data on hover.", + "markdownDescription": "Show memory layout data on hover.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.niches": { "markdownDescription": "How to render the niche information in a memory layout hover.", @@ -1773,7 +1860,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.offset": { "markdownDescription": "How to render the offset information in a memory layout hover.", @@ -1800,7 +1887,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.padding": { "markdownDescription": "How to render the padding information in a memory layout hover.", @@ -1827,7 +1914,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.size": { "markdownDescription": "How to render the size information in a memory layout hover.", @@ -1854,7 +1941,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.show.enumVariants": { "markdownDescription": "How many variants of an enum to display when hovering on. Show none if empty.", @@ -1868,10 +1955,10 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.show.fields": { - "markdownDescription": "How many fields of a struct, variant or union to display when hovering on. Show none if empty.", + "markdownDescription": "How many fields of a struct, variant or union to display when hovering on. 
Show none if\nempty.", "default": 5, "type": [ "null", @@ -1882,7 +1969,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.show.traitAssocItems": { "markdownDescription": "How many associated items of a trait to display when hovering a trait.", @@ -1896,17 +1983,17 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.granularity.enforce": { - "markdownDescription": "Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.", + "markdownDescription": "Enforce the import granularity setting for all files. If set to false rust-analyzer will\ntry to keep import styles consistent per file.", "default": false, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.granularity.group": { "markdownDescription": "How imports should be grouped into use statements.", @@ -1930,27 +2017,27 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.group.enable": { - "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines.", + "markdownDescription": "Group inserted imports by the [following\norder](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are\nseparated by newlines.", "default": true, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.merge.glob": { - "markdownDescription": "Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.", + "markdownDescription": "Allow import insertion to merge new imports into single path glob imports like `use\nstd::fmt::*;`.", "default": true, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.preferNoStd": { "markdownDescription": "Prefer to unconditionally use imports of the core and alloc crate, over the std crate.", @@ -1960,17 +2047,17 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.preferPrelude": { - "markdownDescription": "Whether to prefer import paths containing a `prelude` module.", + "markdownDescription": "Prefer import paths containing a `prelude` module.", "default": false, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.prefix": { "markdownDescription": "The path structure for newly inserted paths to use.", @@ -1990,47 +2077,47 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.prefixExternPrelude": { - "markdownDescription": "Whether to prefix external (including std, core) crate imports with `::`. e.g. \"use ::std::io::Read;\".", + "markdownDescription": "Prefix external (including std, core) crate imports with `::`.\n\nE.g. 
`use ::std::io::Read;`.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.bindingModeHints.enable": { - "markdownDescription": "Whether to show inlay type hints for binding modes.", + "markdownDescription": "Show inlay type hints for binding modes.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.chainingHints.enable": { - "markdownDescription": "Whether to show inlay type hints for method chains.", + "markdownDescription": "Show inlay type hints for method chains.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closingBraceHints.enable": { - "markdownDescription": "Whether to show inlay hints after a closing `}` to indicate what item it belongs to.", + "markdownDescription": "Show inlay hints after a closing `}` to indicate what item it belongs to.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closingBraceHints.minLines": { "markdownDescription": "Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1\nto always show them).", @@ -2041,20 +2128,20 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closureCaptureHints.enable": { - "markdownDescription": "Whether to show inlay hints for closure captures.", + "markdownDescription": "Show inlay hints for closure captures.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closureReturnTypeHints.enable": { - "markdownDescription": "Whether to show inlay type hints for return types of closures.", + "markdownDescription": "Show inlay type hints for return types of closures.", "default": "never", "type": "string", "enum": [ @@ -2071,7 +2158,7 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closureStyle": { "markdownDescription": "Closure notation in type and chaining inlay hints.", @@ -2093,10 +2180,10 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.discriminantHints.enable": { - "markdownDescription": "Whether to show enum variant discriminant hints.", + "markdownDescription": "Show enum variant discriminant hints.", "default": "never", "type": "string", "enum": [ @@ -2113,10 +2200,10 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.expressionAdjustmentHints.enable": { - "markdownDescription": "Whether to show inlay hints for type adjustments.", + "markdownDescription": "Show inlay hints for type adjustments.", "default": "never", "type": "string", "enum": [ @@ -2133,20 +2220,20 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe": { - "markdownDescription": "Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.", + "markdownDescription": "Hide inlay hints for type adjustments outside of `unsafe` blocks.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.expressionAdjustmentHints.mode": { - "markdownDescription": "Whether to show inlay hints as postfix 
ops (`.*` instead of `*`, etc).", + "markdownDescription": "Show inlay hints as postfix ops (`.*` instead of `*`, etc).", "default": "prefix", "type": "string", "enum": [ @@ -2165,60 +2252,60 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.genericParameterHints.const.enable": { - "markdownDescription": "Whether to show const generic parameter name inlay hints.", + "markdownDescription": "Show const generic parameter name inlay hints.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.genericParameterHints.lifetime.enable": { - "markdownDescription": "Whether to show generic lifetime parameter name inlay hints.", + "markdownDescription": "Show generic lifetime parameter name inlay hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.genericParameterHints.type.enable": { - "markdownDescription": "Whether to show generic type parameter name inlay hints.", + "markdownDescription": "Show generic type parameter name inlay hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.implicitDrops.enable": { - "markdownDescription": "Whether to show implicit drop hints.", + "markdownDescription": "Show implicit drop hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.implicitSizedBoundHints.enable": { - "markdownDescription": "Whether to show inlay hints for the implied type parameter `Sized` bound.", + "markdownDescription": "Show inlay hints for the implied type parameter `Sized` bound.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.lifetimeElisionHints.enable": { - "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.", + "markdownDescription": "Show inlay type hints for elided lifetimes in function signatures.", "default": "never", "type": "string", "enum": [ @@ -2235,17 +2322,17 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames": { - "markdownDescription": "Whether to prefer using parameter names as the name for elided lifetime hints if possible.", + "markdownDescription": "Prefer using parameter names as the name for elided lifetime hints if possible.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.maxLength": { "markdownDescription": "Maximum length for inlay hints. 
Set to null to have an unlimited length.", @@ -2259,30 +2346,30 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.parameterHints.enable": { - "markdownDescription": "Whether to show function parameter name inlay hints at the call\nsite.", + "markdownDescription": "Show function parameter name inlay hints at the call site.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.rangeExclusiveHints.enable": { - "markdownDescription": "Whether to show exclusive range inlay hints.", + "markdownDescription": "Show exclusive range inlay hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.reborrowHints.enable": { - "markdownDescription": "Whether to show inlay hints for compiler inserted reborrows.\nThis setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.", + "markdownDescription": "Show inlay hints for compiler inserted reborrows.\n\nThis setting is deprecated in favor of\n#rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.", "default": "never", "type": "string", "enum": [ @@ -2299,7 +2386,7 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.renderColons": { "markdownDescription": "Whether to render leading colons for type hints, and trailing colons for parameter hints.", @@ -2309,57 +2396,57 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.enable": { - "markdownDescription": "Whether to show inlay type hints for variables.", + "markdownDescription": "Show inlay type hints for variables.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": { - "markdownDescription": "Whether to hide inlay type hints for `let` statements that initialize to a closure.\nOnly applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.", + "markdownDescription": "Hide inlay type hints for `let` statements that initialize to a closure.\n\nOnly applies to closures with blocks, same as\n`#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.hideClosureParameter": { - "markdownDescription": "Whether to hide inlay parameter type hints for closures.", + "markdownDescription": "Hide inlay parameter type hints for closures.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.hideNamedConstructor": { - "markdownDescription": "Whether to hide inlay type hints for constructors.", + "markdownDescription": "Hide inlay type hints for constructors.", "default": false, "type": "boolean" } } }, { - "title": "interpret", + "title": "Interpret", "properties": { "rust-analyzer.interpret.tests": { - "markdownDescription": "Enables the experimental support for interpreting tests.", + "markdownDescription": "Enable the experimental support for interpreting tests.", "default": false, "type": "boolean" } } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.joinAssignments": { 
"markdownDescription": "Join lines merges consecutive declaration and initialization of an assignment.", @@ -2369,7 +2456,7 @@ } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.joinElseIf": { "markdownDescription": "Join lines inserts else between consecutive ifs.", @@ -2379,7 +2466,7 @@ } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.removeTrailingComma": { "markdownDescription": "Join lines removes trailing commas.", @@ -2389,7 +2476,7 @@ } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.unwrapTrivialBlock": { "markdownDescription": "Join lines unwraps trivial blocks.", @@ -2399,37 +2486,37 @@ } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.debug.enable": { - "markdownDescription": "Whether to show `Debug` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.enable": { - "markdownDescription": "Whether to show CodeLens in Rust files.", + "markdownDescription": "Show CodeLens in Rust files.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.implementations.enable": { - "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.location": { "markdownDescription": "Where to render annotations.", @@ -2447,70 +2534,70 @@ } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.adt.enable": { - "markdownDescription": "Whether to show `References` lens for Struct, Enum, and Union.\nOnly applies when `#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `References` lens for Struct, Enum, and Union. Only applies when\n`#rust-analyzer.lens.enable#` is set.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.enumVariant.enable": { - "markdownDescription": "Whether to show `References` lens for Enum Variants.\nOnly applies when `#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `References` lens for Enum Variants. Only applies when\n`#rust-analyzer.lens.enable#` is set.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.method.enable": { - "markdownDescription": "Whether to show `Method References` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.trait.enable": { - "markdownDescription": "Whether to show `References` lens for Trait.\nOnly applies when `#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `References` lens for Trait. 
Only applies when `#rust-analyzer.lens.enable#` is\nset.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.run.enable": { - "markdownDescription": "Whether to show `Run` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.updateTest.enable": { - "markdownDescription": "Whether to show `Update Test` lens. Only applies when\n`#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set.", + "markdownDescription": "Show `Update Test` lens. Only applies when `#rust-analyzer.lens.enable#` and\n`#rust-analyzer.lens.run.enable#` are set.", "default": true, "type": "boolean" } } }, { - "title": "general", + "title": "rust-analyzer", "properties": { "rust-analyzer.linkedProjects": { - "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set\nof projects.\n\nElements must be paths pointing to `Cargo.toml`,\n`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON\nobjects in `rust-project.json` format.", + "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set of projects.\n\nElements must be paths pointing to `Cargo.toml`, `rust-project.json`, `.rs` files (which\nwill be treated as standalone files) or JSON objects in `rust-project.json` format.", "default": [], "type": "array", "items": { @@ -2523,7 +2610,7 @@ } }, { - "title": "lru", + "title": "Lru", "properties": { "rust-analyzer.lru.capacity": { "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.", @@ -2538,30 +2625,30 @@ } }, { - "title": "lru", + "title": "Lru", "properties": { "rust-analyzer.lru.query.capacities": { - "markdownDescription": "Sets the LRU capacity of the specified queries.", + "markdownDescription": "The LRU capacity of the specified queries.", "default": {}, "type": "object" } } }, { - "title": "notifications", + "title": "Notifications", "properties": { "rust-analyzer.notifications.cargoTomlNotFound": { - "markdownDescription": "Whether to show `can't find Cargo.toml` error message.", + "markdownDescription": "Show `can't find Cargo.toml` error message.", "default": true, "type": "boolean" } } }, { - "title": "general", + "title": "rust-analyzer", "properties": { "rust-analyzer.numThreads": { - "markdownDescription": "How many worker threads in the main loop. The default `null` means to pick automatically.", + "markdownDescription": "The number of worker threads in the main loop. The default `null` means to pick\nautomatically.", "default": null, "anyOf": [ { @@ -2588,7 +2675,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.attributes.enable": { "markdownDescription": "Expand attribute macros. 
Requires `#rust-analyzer.procMacro.enable#` to be set.", @@ -2598,7 +2685,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.enable": { "markdownDescription": "Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.", @@ -2608,7 +2695,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.ignored": { "markdownDescription": "These proc-macros will be ignored when trying to expand them.\n\nThis config takes a map of crate names with the exported proc-macro names to ignore as values.", @@ -2618,7 +2705,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.server": { "markdownDescription": "Internal config, path to proc-macro server executable.", @@ -2631,7 +2718,7 @@ } }, { - "title": "references", + "title": "References", "properties": { "rust-analyzer.references.excludeImports": { "markdownDescription": "Exclude imports from find-all-references.", @@ -2641,7 +2728,7 @@ } }, { - "title": "references", + "title": "References", "properties": { "rust-analyzer.references.excludeTests": { "markdownDescription": "Exclude tests from find-all-references and call-hierarchy.", @@ -2651,7 +2738,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.command": { "markdownDescription": "Command to be executed instead of 'cargo' for runnables.", @@ -2664,7 +2751,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.extraArgs": { "markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.", @@ -2677,7 +2764,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.extraTestBinaryArgs": { "markdownDescription": "Additional arguments to be passed through Cargo to launched tests, benchmarks, or\ndoc-tests.\n\nUnless the launched target uses a\n[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),\nthey will end up being interpreted as options to\n[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).", @@ -2692,7 +2779,7 @@ } }, { - "title": "rustc", + "title": "Rustc", "properties": { "rust-analyzer.rustc.source": { "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.", @@ -2705,7 +2792,7 @@ } }, { - "title": "rustfmt", + "title": "Rustfmt", "properties": { "rust-analyzer.rustfmt.extraArgs": { "markdownDescription": "Additional arguments to `rustfmt`.", @@ -2718,10 +2805,10 @@ } }, { - "title": "rustfmt", + "title": "Rustfmt", "properties": { "rust-analyzer.rustfmt.overrideCommand": { - "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. 
The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.", + "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", "default": null, "type": [ "null", @@ -2734,7 +2821,7 @@ } }, { - "title": "rustfmt", + "title": "Rustfmt", "properties": { "rust-analyzer.rustfmt.rangeFormatting.enable": { "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.", @@ -2744,7 +2831,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.doc.comment.inject.enable": { "markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.", @@ -2754,17 +2841,17 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.nonStandardTokens": { - "markdownDescription": "Whether the server is allowed to emit non-standard tokens and modifiers.", + "markdownDescription": "Emit non-standard tokens and modifiers\n\nWhen enabled, rust-analyzer will emit tokens and modifiers that are not part of the\nstandard set of semantic tokens.", "default": true, "type": "boolean" } } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.operator.enable": { "markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.", @@ -2774,7 +2861,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.operator.specialization.enable": { "markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.", @@ -2784,7 +2871,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.punctuation.enable": { "markdownDescription": "Use semantic tokens for punctuation.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.", @@ -2794,7 +2881,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": { "markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.", @@ -2804,7 +2891,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": { "markdownDescription": "Use specialized semantic tokens for punctuation.\n\nWhen enabled, rust-analyzer will emit special token 
types for punctuation tokens instead\nof the generic `punctuation` token type.", @@ -2814,7 +2901,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.strings.enable": { "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.", @@ -2824,7 +2911,7 @@ } }, { - "title": "signatureInfo", + "title": "Signature Info", "properties": { "rust-analyzer.signatureInfo.detail": { "markdownDescription": "Show full signature of the callable. Only shows parameters if disabled.", @@ -2842,7 +2929,7 @@ } }, { - "title": "signatureInfo", + "title": "Signature Info", "properties": { "rust-analyzer.signatureInfo.documentation.enable": { "markdownDescription": "Show documentation.", @@ -2852,10 +2939,10 @@ } }, { - "title": "typing", + "title": "Typing", "properties": { "rust-analyzer.typing.triggerChars": { - "markdownDescription": "Specify the characters allowed to invoke special on typing triggers.\n- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression\n- typing `=` between two expressions adds `;` when in statement position\n- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position\n- typing `.` in a chain method call auto-indents\n- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression\n- typing `{` in a use item adds a closing `}` in the right place\n- typing `>` to complete a return type `->` will insert a whitespace after it\n- typing `<` in a path or type position inserts a closing `>` after the path or type.", + "markdownDescription": "Specify the characters allowed to invoke special on typing triggers.\n\n- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing\n expression\n- typing `=` between two expressions adds `;` when in statement position\n- typing `=` to turn an assignment into an equality comparison removes `;` when in\n expression position\n- typing `.` in a chain method call auto-indents\n- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the\n expression\n- typing `{` in a use item adds a closing `}` in the right place\n- typing `>` to complete a return type `->` will insert a whitespace after it\n- typing `<` in a path or type position inserts a closing `>` after the path or type.", "default": "=.", "type": [ "null", @@ -2865,7 +2952,7 @@ } }, { - "title": "vfs", + "title": "Vfs", "properties": { "rust-analyzer.vfs.extraIncludes": { "markdownDescription": "Additional paths to include in the VFS. 
Generally for code that is\ngenerated or otherwise managed by a build system outside of Cargo,\nthough Cargo might be the eventual consumer.", @@ -2878,10 +2965,10 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.discoverConfig": { - "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\"\n ],\n \"progressLabel\": \"rust-analyzer\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object\ncorresponding to `DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on\n`DiscoverProjectData` to provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`,\nwhich will be substituted with the JSON-serialized form of the following\nenum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`,\nand therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to\nto update an existing workspace. 
As a reference for implementors,\nbuck2's `rust-project` will likely be useful:\nhttps://github.com/facebook/buck2/tree/main/integrations/rust-project.", + "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\"\n ],\n \"progressLabel\": \"rust-analyzer\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. 
As a reference for implementors, buck2's `rust-project` will likely\nbe useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project.", "default": null, "anyOf": [ { @@ -2912,7 +2999,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.excludeImports": { "markdownDescription": "Exclude all imports from workspace symbol search.\n\nIn addition to regular imports (which are always excluded),\nthis option removes public imports (better known as re-exports)\nand removes imports that rename the imported symbol.", @@ -2922,7 +3009,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.kind": { "markdownDescription": "Workspace symbol search kind.", @@ -2940,7 +3027,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.limit": { "markdownDescription": "Limits the number of items returned from a workspace symbol search (Defaults to 128).\nSome clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.\nOther clients requires all results upfront and might require a higher limit.", @@ -2951,7 +3038,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.scope": { "markdownDescription": "Workspace symbol search scope.", diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml index 27fdb672455bc..c9862495bc0c6 100644 --- a/src/tools/rust-analyzer/triagebot.toml +++ b/src/tools/rust-analyzer/triagebot.toml @@ -28,6 +28,3 @@ labels = ["has-merge-commits", "S-waiting-on-author"] # Prevents mentions in commits to avoid users being spammed [no-mentions] - -# Automatically close and reopen PRs made by bots to run CI on them -[bot-pull-requests] diff --git a/src/tools/rust-analyzer/xtask/src/codegen.rs b/src/tools/rust-analyzer/xtask/src/codegen.rs index 19ca62e8a3290..bc7eb88f3a848 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen.rs @@ -173,11 +173,11 @@ fn add_preamble(cg: CodegenType, mut text: String) -> String { #[allow(clippy::print_stderr)] fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: bool) -> bool { let contents = normalize_newlines(contents); - if let Ok(old_contents) = fs::read_to_string(file) { - if normalize_newlines(&old_contents) == contents { - // File is already up to date. - return false; - } + if let Ok(old_contents) = fs::read_to_string(file) + && normalize_newlines(&old_contents) == contents + { + // File is already up to date. 
+ return false; } let display_path = file.strip_prefix(project_root()).unwrap_or(file); diff --git a/src/tools/rust-analyzer/xtask/src/publish/notes.rs b/src/tools/rust-analyzer/xtask/src/publish/notes.rs index 93592d4986f8a..8d36fcb61b44c 100644 --- a/src/tools/rust-analyzer/xtask/src/publish/notes.rs +++ b/src/tools/rust-analyzer/xtask/src/publish/notes.rs @@ -72,13 +72,13 @@ impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { } fn process_document_title(&mut self) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some((level, title)) = get_title(&line) { - let title = process_inline_macros(title)?; - if level == 1 { - self.write_title(level, &title); - return Ok(()); - } + if let Some(Ok(line)) = self.iter.next() + && let Some((level, title)) = get_title(&line) + { + let title = process_inline_macros(title)?; + if level == 1 { + self.write_title(level, &title); + return Ok(()); } } bail!("document title not found") @@ -141,39 +141,39 @@ impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { } fn process_source_code_block(&mut self, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some(styles) = line.strip_prefix("[source").and_then(|s| s.strip_suffix(']')) { - let mut styles = styles.split(','); - if !styles.next().unwrap().is_empty() { - bail!("not a source code block"); - } - let language = styles.next(); - return self.process_listing_block(language, level); + if let Some(Ok(line)) = self.iter.next() + && let Some(styles) = line.strip_prefix("[source").and_then(|s| s.strip_suffix(']')) + { + let mut styles = styles.split(','); + if !styles.next().unwrap().is_empty() { + bail!("not a source code block"); } + let language = styles.next(); + return self.process_listing_block(language, level); } bail!("not a source code block") } fn process_listing_block(&mut self, style: Option<&str>, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if line == LISTING_DELIMITER { - self.write_indent(level); - self.output.push_str("```"); - if let Some(style) = style { - self.output.push_str(style); - } - self.output.push('\n'); - while let Some(line) = self.iter.next() { - let line = line?; - if line == LISTING_DELIMITER { - self.write_line("```", level); - return Ok(()); - } else { - self.write_line(&line, level); - } + if let Some(Ok(line)) = self.iter.next() + && line == LISTING_DELIMITER + { + self.write_indent(level); + self.output.push_str("```"); + if let Some(style) = style { + self.output.push_str(style); + } + self.output.push('\n'); + while let Some(line) = self.iter.next() { + let line = line?; + if line == LISTING_DELIMITER { + self.write_line("```", level); + return Ok(()); + } else { + self.write_line(&line, level); } - bail!("listing block is not terminated") } + bail!("listing block is not terminated") } bail!("not a listing block") } @@ -200,49 +200,48 @@ impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { } fn process_image_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some((url, attrs)) = parse_media_block(&line, IMAGE_BLOCK_PREFIX) { - let alt = if let Some(stripped) = - attrs.strip_prefix('"').and_then(|s| s.strip_suffix('"')) - { + if let Some(Ok(line)) = self.iter.next() + && let Some((url, attrs)) = parse_media_block(&line, IMAGE_BLOCK_PREFIX) + { + let alt = + if let Some(stripped) = attrs.strip_prefix('"').and_then(|s| s.strip_suffix('"')) { stripped } else { attrs }; - if let Some(caption) = caption { 
- self.write_caption_line(caption, level); - } - self.write_indent(level); - self.output.push_str("!["); - self.output.push_str(alt); - self.output.push_str("]("); - self.output.push_str(url); - self.output.push_str(")\n"); - return Ok(()); + if let Some(caption) = caption { + self.write_caption_line(caption, level); } + self.write_indent(level); + self.output.push_str("!["); + self.output.push_str(alt); + self.output.push_str("]("); + self.output.push_str(url); + self.output.push_str(")\n"); + return Ok(()); } bail!("not a image block") } fn process_video_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some((url, attrs)) = parse_media_block(&line, VIDEO_BLOCK_PREFIX) { - let html_attrs = match attrs { - "options=loop" => "controls loop", - r#"options="autoplay,loop""# => "autoplay controls loop", - _ => bail!("unsupported video syntax"), - }; - if let Some(caption) = caption { - self.write_caption_line(caption, level); - } - self.write_indent(level); - self.output.push_str(r#"\n"); - return Ok(()); + if let Some(Ok(line)) = self.iter.next() + && let Some((url, attrs)) = parse_media_block(&line, VIDEO_BLOCK_PREFIX) + { + let html_attrs = match attrs { + "options=loop" => "controls loop", + r#"options="autoplay,loop""# => "autoplay controls loop", + _ => bail!("unsupported video syntax"), + }; + if let Some(caption) = caption { + self.write_caption_line(caption, level); } + self.write_indent(level); + self.output.push_str(r#"\n"); + return Ok(()); } bail!("not a video block") } @@ -371,12 +370,11 @@ fn strip_prefix_symbol(line: &str, symbol: char) -> Option<(usize, &str)> { } fn parse_media_block<'a>(line: &'a str, prefix: &str) -> Option<(&'a str, &'a str)> { - if let Some(line) = line.strip_prefix(prefix) { - if let Some((url, rest)) = line.split_once('[') { - if let Some(attrs) = rest.strip_suffix(']') { - return Some((url, attrs)); - } - } + if let Some(line) = line.strip_prefix(prefix) + && let Some((url, rest)) = line.split_once('[') + && let Some(attrs) = rest.strip_suffix(']') + { + return Some((url, attrs)); } None }
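
Note on the xtask hunks above (codegen.rs and publish/notes.rs): they are a mechanical refactor that collapses nested `if let { if ... { ... } }` blocks into `if let ... && ...` chains, which the 2024 edition allows. Below is a minimal, self-contained sketch of that pattern under assumed names (`Cache`, `is_up_to_date` are hypothetical and not part of the patch); it only mirrors the shape of the refactor, not the actual xtask code.

// Sketch of the let-chain flattening used throughout the xtask hunks.
// Assumes edition = "2024" (or a toolchain where if-let chains are stable).
use std::collections::HashMap;

struct Cache {
    entries: HashMap<String, String>,
}

impl Cache {
    // Before the refactor this would be two nested blocks:
    //
    //     if let Some(old) = self.entries.get(key) {
    //         if old == expected {
    //             return true;
    //         }
    //     }
    //     false
    //
    // The chain keeps one indentation level and a single early return,
    // exactly like the `ensure_file_contents` and `process_*` changes above.
    fn is_up_to_date(&self, key: &str, expected: &str) -> bool {
        if let Some(old) = self.entries.get(key)
            && old == expected
        {
            // Entry is already up to date.
            return true;
        }
        false
    }
}

fn main() {
    let cache = Cache { entries: HashMap::from([("a".to_owned(), "1".to_owned())]) };
    assert!(cache.is_up_to_date("a", "1"));
    assert!(!cache.is_up_to_date("a", "2"));
    println!("let-chain sketch ok");
}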