From 34ddd59e4544e77464fd6230bc354d0753987800 Mon Sep 17 00:00:00 2001
From: Josh McKinney
Date: Wed, 2 Jul 2025 22:09:04 -0700
Subject: [PATCH 001/118] Improve settings tree title and descriptions

- All settings are now phrased in the imperative form, stating what the
  setting does rather than talking about what it controls. (E.g. "Show
  `Debug` action." instead of "Whether to show `Debug` action".)
- Categories are now displayed in title case
- Categories are now sorted lexicographically
- The General category is removed (and its settings are moved to the top
  level)
- Language for a few descriptions is made a bit less ambiguous
---
 .../crates/rust-analyzer/src/config.rs        | 592 ++++++++++------
 .../docs/book/src/configuration_generated.md  | 292 ++++----
 .../rust-analyzer/editors/code/package.json   | 639 ++++++++++--------
 3 files changed, 899 insertions(+), 624 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index e716d14075221..3f700862280f2 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -61,233 +61,312 @@ pub enum MaxSubstitutionLength {
     Limit(usize),
 }
 
-// Defines the server-side configuration of the rust-analyzer. We generate
-// *parts* of VS Code's `package.json` config from this. Run `cargo test` to
-// re-generate that file.
+// Defines the server-side configuration of the rust-analyzer. We generate *parts* of VS Code's
+// `package.json` config from this. Run `cargo test` to re-generate that file.
 //
-// However, editor specific config, which the server doesn't know about, should
-// be specified directly in `package.json`.
+// However, editor specific config, which the server doesn't know about, should be specified
+// directly in `package.json`.
 //
-// To deprecate an option by replacing it with another name use `new_name` | `old_name` so that we keep
-// parsing the old name.
+// To deprecate an option by replacing it with another name use `new_name` | `old_name` so that we
+// keep parsing the old name.
 config_data! {
-    /// Configs that apply on a workspace-wide scope. There are 2 levels on which a global configuration can be configured
+    /// Configs that apply on a workspace-wide scope. There are 2 levels on which a global
+    /// configuration can be configured
     ///
-    /// 1. `rust-analyzer.toml` file under user's config directory (e.g ~/.config/rust-analyzer/rust-analyzer.toml)
+    /// 1. `rust-analyzer.toml` file under user's config directory (e.g
+    ///    ~/.config/rust-analyzer/rust-analyzer.toml)
     /// 2. Client's own configurations (e.g `settings.json` on VS Code)
     ///
-    /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle.
+    /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen
+    /// by the nearest first principle.
     global: struct GlobalDefaultConfigData <- GlobalConfigInput -> {
         /// Warm up caches on project load.
         cachePriming_enable: bool = true,
-        /// How many worker threads to handle priming caches. The default `0` means to pick automatically.
+
+        /// How many worker threads to handle priming caches. The default `0` means to pick
+        /// automatically.
         cachePriming_numThreads: NumThreads = NumThreads::Physical,
 
         /// Custom completion snippets.
- completion_snippets_custom: FxIndexMap = Config::completion_snippets_default(), - + completion_snippets_custom: FxIndexMap = + Config::completion_snippets_default(), - /// These paths (file/directories) will be ignored by rust-analyzer. They are - /// relative to the workspace root, and globs are not supported. You may - /// also need to add the folders to Code's `files.watcherExclude`. + /// List of files to ignore + /// + /// These paths (file/directories) will be ignored by rust-analyzer. They are relative to + /// the workspace root, and globs are not supported. You may also need to add the folders to + /// Code's `files.watcherExclude`. files_exclude | files_excludeDirs: Vec = vec![], - - - /// Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`). + /// Highlight related return values while the cursor is on any `match`, `if`, or match arm + /// arrow (`=>`). highlightRelated_branchExitPoints_enable: bool = true, - /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords. + + /// Highlight related references while the cursor is on `break`, `loop`, `while`, or `for` + /// keywords. highlightRelated_breakPoints_enable: bool = true, - /// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure. + + /// Highlight all captures of a closure while the cursor is on the `|` or move keyword of a closure. highlightRelated_closureCaptures_enable: bool = true, - /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`). + + /// Highlight all exit points while the cursor is on any `return`, `?`, `fn`, or return type + /// arrow (`->`). highlightRelated_exitPoints_enable: bool = true, - /// Enables highlighting of related references while the cursor is on any identifier. + + /// Highlight related references while the cursor is on any identifier. highlightRelated_references_enable: bool = true, - /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords. + + /// Highlight all break points for a loop or block context while the cursor is on any + /// `async` or `await` keywords. highlightRelated_yieldPoints_enable: bool = true, - /// Whether to show `Debug` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_debug_enable: bool = true, - /// Whether to show HoverActions in Rust files. - hover_actions_enable: bool = true, - /// Whether to show `Go to Type Definition` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_gotoTypeDef_enable: bool = true, - /// Whether to show `Implementations` action. Only applies when + /// Show `Debug` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_debug_enable: bool = true, + + /// Show HoverActions in Rust files. + hover_actions_enable: bool = true, + + /// Show `Go to Type Definition` action. Only applies when /// `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_gotoTypeDef_enable: bool = true, + + /// Show `Implementations` action. Only applies when `#rust-analyzer.hover.actions.enable#` + /// is set. hover_actions_implementations_enable: bool = true, - /// Whether to show `References` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. 
- hover_actions_references_enable: bool = false, - /// Whether to show `Run` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` is set. - hover_actions_run_enable: bool = true, - /// Whether to show `Update Test` action. Only applies when - /// `#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set. - hover_actions_updateTest_enable: bool = true, - - /// Whether to show documentation on hover. - hover_documentation_enable: bool = true, - /// Whether to show keyword hover popups. Only applies when + + /// Show `References` action. Only applies when `#rust-analyzer.hover.actions.enable#` is + /// set. + hover_actions_references_enable: bool = false, + + /// Show `Run` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. + hover_actions_run_enable: bool = true, + + /// Show `Update Test` action. Only applies when `#rust-analyzer.hover.actions.enable#` and + /// `#rust-analyzer.hover.actions.run.enable#` are set. + hover_actions_updateTest_enable: bool = true, + + /// Show documentation on hover. + hover_documentation_enable: bool = true, + + /// Show keyword hover popups. Only applies when /// `#rust-analyzer.hover.documentation.enable#` is set. - hover_documentation_keywords_enable: bool = true, - /// Whether to show drop glue information on hover. - hover_dropGlue_enable: bool = true, + hover_documentation_keywords_enable: bool = true, + + /// Show drop glue information on hover. + hover_dropGlue_enable: bool = true, + /// Use markdown syntax for links on hover. hover_links_enable: bool = true, - /// Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis. + + /// Show what types are used as generic arguments in calls etc. on hover, and limit the max + /// length to show such types, beyond which they will be shown with ellipsis. /// - /// This can take three values: `null` means "unlimited", the string `"hide"` means to not show generic substitutions at all, and a number means to limit them to X characters. + /// This can take three values: `null` means "unlimited", the string `"hide"` means to not + /// show generic substitutions at all, and a number means to limit them to X characters. /// /// The default is 20 characters. - hover_maxSubstitutionLength: Option = Some(MaxSubstitutionLength::Limit(20)), + hover_maxSubstitutionLength: Option = + Some(MaxSubstitutionLength::Limit(20)), + /// How to render the align information in a memory layout hover. - hover_memoryLayout_alignment: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), - /// Whether to show memory layout data on hover. + hover_memoryLayout_alignment: Option = + Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + + /// Show memory layout data on hover. hover_memoryLayout_enable: bool = true, + /// How to render the niche information in a memory layout hover. hover_memoryLayout_niches: Option = Some(false), + /// How to render the offset information in a memory layout hover. - hover_memoryLayout_offset: Option = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + hover_memoryLayout_offset: Option = + Some(MemoryLayoutHoverRenderKindDef::Hexadecimal), + /// How to render the padding information in a memory layout hover. hover_memoryLayout_padding: Option = None, + /// How to render the size information in a memory layout hover. 
- hover_memoryLayout_size: Option = Some(MemoryLayoutHoverRenderKindDef::Both), + hover_memoryLayout_size: Option = + Some(MemoryLayoutHoverRenderKindDef::Both), /// How many variants of an enum to display when hovering on. Show none if empty. hover_show_enumVariants: Option = Some(5), - /// How many fields of a struct, variant or union to display when hovering on. Show none if empty. + + /// How many fields of a struct, variant or union to display when hovering on. Show none if + /// empty. hover_show_fields: Option = Some(5), + /// How many associated items of a trait to display when hovering a trait. hover_show_traitAssocItems: Option = None, - /// Whether to show inlay type hints for binding modes. - inlayHints_bindingModeHints_enable: bool = false, - /// Whether to show inlay type hints for method chains. - inlayHints_chainingHints_enable: bool = true, - /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to. - inlayHints_closingBraceHints_enable: bool = true, + /// Show inlay type hints for binding modes. + inlayHints_bindingModeHints_enable: bool = false, + + /// Show inlay type hints for method chains. + inlayHints_chainingHints_enable: bool = true, + + /// Show inlay hints after a closing `}` to indicate what item it belongs to. + inlayHints_closingBraceHints_enable: bool = true, + /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1 /// to always show them). - inlayHints_closingBraceHints_minLines: usize = 25, - /// Whether to show inlay hints for closure captures. - inlayHints_closureCaptureHints_enable: bool = false, - /// Whether to show inlay type hints for return types of closures. - inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = ClosureReturnTypeHintsDef::Never, + inlayHints_closingBraceHints_minLines: usize = 25, + + /// Show inlay hints for closure captures. + inlayHints_closureCaptureHints_enable: bool = false, + + /// Show inlay type hints for return types of closures. + inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = + ClosureReturnTypeHintsDef::Never, + /// Closure notation in type and chaining inlay hints. - inlayHints_closureStyle: ClosureStyle = ClosureStyle::ImplFn, - /// Whether to show enum variant discriminant hints. - inlayHints_discriminantHints_enable: DiscriminantHintsDef = DiscriminantHintsDef::Never, - /// Whether to show inlay hints for type adjustments. - inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = AdjustmentHintsDef::Never, - /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. + inlayHints_closureStyle: ClosureStyle = ClosureStyle::ImplFn, + + /// Show enum variant discriminant hints. + inlayHints_discriminantHints_enable: DiscriminantHintsDef = + DiscriminantHintsDef::Never, + + /// Show inlay hints for type adjustments. + inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = + AdjustmentHintsDef::Never, + + /// Hide inlay hints for type adjustments outside of `unsafe` blocks. inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = false, - /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). - inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = AdjustmentHintsModeDef::Prefix, - /// Whether to show const generic parameter name inlay hints. - inlayHints_genericParameterHints_const_enable: bool= true, - /// Whether to show generic lifetime parameter name inlay hints. + + /// Show inlay hints as postfix ops (`.*` instead of `*`, etc). 
+ inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = + AdjustmentHintsModeDef::Prefix, + + /// Show const generic parameter name inlay hints. + inlayHints_genericParameterHints_const_enable: bool = true, + + /// Show generic lifetime parameter name inlay hints. inlayHints_genericParameterHints_lifetime_enable: bool = false, - /// Whether to show generic type parameter name inlay hints. + + /// Show generic type parameter name inlay hints. inlayHints_genericParameterHints_type_enable: bool = false, - /// Whether to show implicit drop hints. - inlayHints_implicitDrops_enable: bool = false, - /// Whether to show inlay hints for the implied type parameter `Sized` bound. - inlayHints_implicitSizedBoundHints_enable: bool = false, - /// Whether to show inlay type hints for elided lifetimes in function signatures. + + /// Show implicit drop hints. + inlayHints_implicitDrops_enable: bool = false, + + /// Show inlay hints for the implied type parameter `Sized` bound. + inlayHints_implicitSizedBoundHints_enable: bool = false, + + /// Show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = LifetimeElisionDef::Never, - /// Whether to prefer using parameter names as the name for elided lifetime hints if possible. - inlayHints_lifetimeElisionHints_useParameterNames: bool = false, + + /// Prefer using parameter names as the name for elided lifetime hints if possible. + inlayHints_lifetimeElisionHints_useParameterNames: bool = false, + /// Maximum length for inlay hints. Set to null to have an unlimited length. - inlayHints_maxLength: Option = Some(25), - /// Whether to show function parameter name inlay hints at the call - /// site. - inlayHints_parameterHints_enable: bool = true, - /// Whether to show exclusive range inlay hints. - inlayHints_rangeExclusiveHints_enable: bool = false, - /// Whether to show inlay hints for compiler inserted reborrows. - /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. - inlayHints_reborrowHints_enable: ReborrowHintsDef = ReborrowHintsDef::Never, + inlayHints_maxLength: Option = Some(25), + + /// Show function parameter name inlay hints at the call site. + inlayHints_parameterHints_enable: bool = true, + + /// Show exclusive range inlay hints. + inlayHints_rangeExclusiveHints_enable: bool = false, + + /// Show inlay hints for compiler inserted reborrows. + /// + /// This setting is deprecated in favor of + /// #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. + inlayHints_reborrowHints_enable: ReborrowHintsDef = ReborrowHintsDef::Never, + /// Whether to render leading colons for type hints, and trailing colons for parameter hints. - inlayHints_renderColons: bool = true, - /// Whether to show inlay type hints for variables. - inlayHints_typeHints_enable: bool = true, - /// Whether to hide inlay type hints for `let` statements that initialize to a closure. - /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. - inlayHints_typeHints_hideClosureInitialization: bool = false, - /// Whether to hide inlay parameter type hints for closures. - inlayHints_typeHints_hideClosureParameter:bool = false, - /// Whether to hide inlay type hints for constructors. - inlayHints_typeHints_hideNamedConstructor: bool = false, - - /// Enables the experimental support for interpreting tests. + inlayHints_renderColons: bool = true, + + /// Show inlay type hints for variables. 
+ inlayHints_typeHints_enable: bool = true, + + /// Hide inlay type hints for `let` statements that initialize to a closure. + /// + /// Only applies to closures with blocks, same as + /// `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. + inlayHints_typeHints_hideClosureInitialization: bool = false, + + /// Hide inlay parameter type hints for closures. + inlayHints_typeHints_hideClosureParameter: bool = false, + + /// Hide inlay type hints for constructors. + inlayHints_typeHints_hideNamedConstructor: bool = false, + + /// Enable the experimental support for interpreting tests. interpret_tests: bool = false, /// Join lines merges consecutive declaration and initialization of an assignment. joinLines_joinAssignments: bool = true, + /// Join lines inserts else between consecutive ifs. joinLines_joinElseIf: bool = true, + /// Join lines removes trailing commas. joinLines_removeTrailingComma: bool = true, + /// Join lines unwraps trivial blocks. joinLines_unwrapTrivialBlock: bool = true, - /// Whether to show `Debug` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_debug_enable: bool = true, - /// Whether to show CodeLens in Rust files. - lens_enable: bool = true, - /// Whether to show `Implementations` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_implementations_enable: bool = true, + /// Show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set. + lens_debug_enable: bool = true, + + /// Show CodeLens in Rust files. + lens_enable: bool = true, + + /// Show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set. + lens_implementations_enable: bool = true, + /// Where to render annotations. lens_location: AnnotationLocation = AnnotationLocation::AboveName, - /// Whether to show `References` lens for Struct, Enum, and Union. - /// Only applies when `#rust-analyzer.lens.enable#` is set. + + /// Show `References` lens for Struct, Enum, and Union. Only applies when + /// `#rust-analyzer.lens.enable#` is set. lens_references_adt_enable: bool = false, - /// Whether to show `References` lens for Enum Variants. - /// Only applies when `#rust-analyzer.lens.enable#` is set. - lens_references_enumVariant_enable: bool = false, - /// Whether to show `Method References` lens. Only applies when + + /// Show `References` lens for Enum Variants. Only applies when /// `#rust-analyzer.lens.enable#` is set. + lens_references_enumVariant_enable: bool = false, + + /// Show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set. lens_references_method_enable: bool = false, - /// Whether to show `References` lens for Trait. - /// Only applies when `#rust-analyzer.lens.enable#` is set. + + /// Show `References` lens for Trait. Only applies when `#rust-analyzer.lens.enable#` is + /// set. lens_references_trait_enable: bool = false, - /// Whether to show `Run` lens. Only applies when - /// `#rust-analyzer.lens.enable#` is set. - lens_run_enable: bool = true, - /// Whether to show `Update Test` lens. Only applies when - /// `#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set. + + /// Show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set. + lens_run_enable: bool = true, + + /// Show `Update Test` lens. Only applies when `#rust-analyzer.lens.enable#` and + /// `#rust-analyzer.lens.run.enable#` are set. lens_updateTest_enable: bool = true, - /// Disable project auto-discovery in favor of explicitly specified set - /// of projects. 
+ /// Disable project auto-discovery in favor of explicitly specified set of projects. /// - /// Elements must be paths pointing to `Cargo.toml`, - /// `rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON - /// objects in `rust-project.json` format. + /// Elements must be paths pointing to `Cargo.toml`, `rust-project.json`, `.rs` files (which + /// will be treated as standalone files) or JSON objects in `rust-project.json` format. linkedProjects: Vec = vec![], /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128. - lru_capacity: Option = None, - /// Sets the LRU capacity of the specified queries. + lru_capacity: Option = None, + + /// The LRU capacity of the specified queries. lru_query_capacities: FxHashMap, u16> = FxHashMap::default(), - /// Whether to show `can't find Cargo.toml` error message. - notifications_cargoTomlNotFound: bool = true, + /// Show `can't find Cargo.toml` error message. + notifications_cargoTomlNotFound: bool = true, - /// How many worker threads in the main loop. The default `null` means to pick automatically. + /// The number of worker threads in the main loop. The default `null` means to pick + /// automatically. numThreads: Option = None, /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set. procMacro_attributes_enable: bool = true, + /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`. - procMacro_enable: bool = true, + procMacro_enable: bool = true, + /// Internal config, path to proc-macro server executable. - procMacro_server: Option = None, + procMacro_server: Option = None, /// Exclude imports from find-all-references. references_excludeImports: bool = false, @@ -300,31 +379,41 @@ config_data! { /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra /// doc links. semanticHighlighting_doc_comment_inject_enable: bool = true, - /// Whether the server is allowed to emit non-standard tokens and modifiers. + + /// Emit non-standard tokens and modifiers + /// + /// When enabled, rust-analyzer will emit tokens and modifiers that are not part of the + /// standard set of semantic tokens. semanticHighlighting_nonStandardTokens: bool = true, + /// Use semantic tokens for operators. /// /// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when /// they are tagged with modifiers. semanticHighlighting_operator_enable: bool = true, + /// Use specialized semantic tokens for operators. /// /// When enabled, rust-analyzer will emit special token types for operator tokens instead /// of the generic `operator` token type. semanticHighlighting_operator_specialization_enable: bool = false, + /// Use semantic tokens for punctuation. /// /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when /// they are tagged with modifiers or have a special role. semanticHighlighting_punctuation_enable: bool = false, + /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro /// calls. semanticHighlighting_punctuation_separate_macro_bang: bool = false, + /// Use specialized semantic tokens for punctuation. /// /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead /// of the generic `punctuation` token type. semanticHighlighting_punctuation_specialization_enable: bool = false, + /// Use semantic tokens for strings. /// /// In some editors (e.g. 
vscode) semantic tokens override other highlighting grammars. @@ -333,16 +422,21 @@ config_data! { semanticHighlighting_strings_enable: bool = true, /// Show full signature of the callable. Only shows parameters if disabled. - signatureInfo_detail: SignatureDetail = SignatureDetail::Full, + signatureInfo_detail: SignatureDetail = SignatureDetail::Full, + /// Show documentation. - signatureInfo_documentation_enable: bool = true, + signatureInfo_documentation_enable: bool = true, /// Specify the characters allowed to invoke special on typing triggers. - /// - typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression + /// + /// - typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing + /// expression /// - typing `=` between two expressions adds `;` when in statement position - /// - typing `=` to turn an assignment into an equality comparison removes `;` when in expression position + /// - typing `=` to turn an assignment into an equality comparison removes `;` when in + /// expression position /// - typing `.` in a chain method call auto-indents - /// - typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression + /// - typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the + /// expression /// - typing `{` in a use item adds a closing `}` in the right place /// - typing `>` to complete a return type `->` will insert a whitespace after it /// - typing `<` in a path or type position inserts a closing `>` after the path or type. @@ -374,8 +468,8 @@ config_data! { /// /// **Warning**: This format is provisional and subject to change. /// - /// [`DiscoverWorkspaceConfig::command`] *must* return a JSON object - /// corresponding to `DiscoverProjectData::Finished`: + /// [`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to + /// `DiscoverProjectData::Finished`: /// /// ```norun /// #[derive(Debug, Clone, Deserialize, Serialize)] @@ -405,12 +499,11 @@ config_data! { /// } /// ``` /// - /// It is encouraged, but not required, to use the other variants on - /// `DiscoverProjectData` to provide a more polished end-user experience. + /// It is encouraged, but not required, to use the other variants on `DiscoverProjectData` + /// to provide a more polished end-user experience. /// - /// `DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, - /// which will be substituted with the JSON-serialized form of the following - /// enum: + /// `DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be + /// substituted with the JSON-serialized form of the following enum: /// /// ```norun /// #[derive(PartialEq, Clone, Debug, Serialize)] @@ -437,11 +530,10 @@ config_data! { /// } /// ``` /// - /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, - /// and therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to - /// to update an existing workspace. As a reference for implementors, - /// buck2's `rust-project` will likely be useful: - /// https://github.com/facebook/buck2/tree/main/integrations/rust-project. + /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and + /// therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an + /// existing workspace. As a reference for implementors, buck2's `rust-project` will likely + /// be useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project. 
workspace_discoverConfig: Option = None, } } @@ -449,109 +541,154 @@ config_data! { config_data! { /// Local configurations can be defined per `SourceRoot`. This almost always corresponds to a `Crate`. local: struct LocalDefaultConfigData <- LocalConfigInput -> { - /// Whether to insert #[must_use] when generating `as_` methods - /// for enum variants. - assist_emitMustUse: bool = false, + /// Insert #[must_use] when generating `as_` methods for enum variants. + assist_emitMustUse: bool = false, + /// Placeholder expression to use for missing expressions in assists. - assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, - /// When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible. + assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, + + /// Prefer to use `Self` over the type name when inserting a type (e.g. in "fill match arms" assist). assist_preferSelf: bool = false, - /// Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check. + + /// Enable borrow checking for term search code assists. If set to false, also there will be + /// more suggestions, but some of them may not borrow-check. assist_termSearch_borrowcheck: bool = true, + /// Term search fuel in "units of work" for assists (Defaults to 1800). assist_termSearch_fuel: usize = 1800, - - /// Whether to automatically add a semicolon when completing unit-returning functions. + /// Automatically add a semicolon when completing unit-returning functions. /// /// In `match` arms it completes a comma instead. completion_addSemicolonToUnit: bool = true, - /// Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future. - completion_autoAwait_enable: bool = true, - /// Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them. - completion_autoIter_enable: bool = true, - /// Toggles the additional completions that automatically add imports when completed. - /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. - completion_autoimport_enable: bool = true, + + /// Show method calls and field accesses completions with `await` prefixed to them when + /// completing on a future. + completion_autoAwait_enable: bool = true, + + /// Show method call completions with `iter()` or `into_iter()` prefixed to them when + /// completing on a type that has them. + completion_autoIter_enable: bool = true, + + /// Show completions that automatically add imports when completed. + /// + /// Note that your client must specify the `additionalTextEdits` LSP client capability to + /// truly have this feature enabled. + completion_autoimport_enable: bool = true, + /// A list of full paths to items to exclude from auto-importing completions. /// /// Traits in this list won't have their methods suggested in completions unless the trait /// is in scope. /// - /// You can either specify a string path which defaults to type "always" or use the more verbose - /// form `{ "path": "path::to::item", type: "always" }`. + /// You can either specify a string path which defaults to type "always" or use the more + /// verbose form `{ "path": "path::to::item", type: "always" }`. 
/// - /// For traits the type "methods" can be used to only exclude the methods but not the trait itself. + /// For traits the type "methods" can be used to only exclude the methods but not the trait + /// itself. /// /// This setting also inherits `#rust-analyzer.completion.excludeTraits#`. completion_autoimport_exclude: Vec = vec![ AutoImportExclusion::Verbose { path: "core::borrow::Borrow".to_owned(), r#type: AutoImportExclusionType::Methods }, AutoImportExclusion::Verbose { path: "core::borrow::BorrowMut".to_owned(), r#type: AutoImportExclusionType::Methods }, ], - /// Toggles the additional completions that automatically show method calls and field accesses - /// with `self` prefixed to them when inside a method. - completion_autoself_enable: bool = true, - /// Whether to add parenthesis and argument snippets when completing function. - completion_callable_snippets: CallableCompletionDef = CallableCompletionDef::FillArguments, + + /// Show method calls and field access completions with `self` prefixed to them when + /// inside a method. + completion_autoself_enable: bool = true, + + /// Add parenthesis and argument snippets when completing function. + completion_callable_snippets: CallableCompletionDef = CallableCompletionDef::FillArguments, + /// A list of full paths to traits whose methods to exclude from completion. /// - /// Methods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`. + /// Methods from these traits won't be completed, even if the trait is in scope. However, + /// they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or + /// `T where T: Trait`. /// /// Note that the trait themselves can still be completed. completion_excludeTraits: Vec = Vec::new(), - /// Whether to show full function/method signatures in completion docs. + + /// Show full function / method signatures in completion docs. completion_fullFunctionSignatures_enable: bool = false, - /// Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden. + + /// Omit deprecated items from completions. By default they are marked as deprecated but not + /// hidden. completion_hideDeprecated: bool = false, + /// Maximum number of completions to return. If `None`, the limit is infinite. completion_limit: Option = None, - /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. - completion_postfix_enable: bool = true, - /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. + + /// Show postfix snippets like `dbg`, `if`, `not`, etc. + completion_postfix_enable: bool = true, + + /// Show completions of private items and fields that are defined in the current workspace + /// even if they are not visible at the current position. completion_privateEditable_enable: bool = false, - /// Whether to enable term search based snippets like `Some(foo.bar().baz())`. + + /// Enable term search based snippets like `Some(foo.bar().baz())`. completion_termSearch_enable: bool = false, + /// Term search fuel in "units of work" for autocompletion (Defaults to 1000). completion_termSearch_fuel: usize = 1000, /// List of rust-analyzer diagnostics to disable. diagnostics_disabled: FxHashSet = FxHashSet::default(), - /// Whether to show native rust-analyzer diagnostics. 
- diagnostics_enable: bool = true, - /// Whether to show experimental rust-analyzer diagnostics that might - /// have more false positives than usual. - diagnostics_experimental_enable: bool = false, - /// Map of prefixes to be substituted when parsing diagnostic file paths. - /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. + + /// Show native rust-analyzer diagnostics. + diagnostics_enable: bool = true, + + /// Show experimental rust-analyzer diagnostics that might have more false positives than + /// usual. + diagnostics_experimental_enable: bool = false, + + /// Map of prefixes to be substituted when parsing diagnostic file paths. This should be the + /// reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. diagnostics_remapPrefix: FxHashMap = FxHashMap::default(), - /// Whether to run additional style lints. - diagnostics_styleLints_enable: bool = false, + + /// Run additional style lints. + diagnostics_styleLints_enable: bool = false, + /// List of warnings that should be displayed with hint severity. /// - /// The warnings will be indicated by faded text or three dots in code - /// and will not show up in the `Problems Panel`. + /// The warnings will be indicated by faded text or three dots in code and will not show up + /// in the `Problems Panel`. diagnostics_warningsAsHint: Vec = vec![], + /// List of warnings that should be displayed with info severity. /// - /// The warnings will be indicated by a blue squiggly underline in code - /// and a blue icon in the `Problems Panel`. + /// The warnings will be indicated by a blue squiggly underline in code and a blue icon in + /// the `Problems Panel`. diagnostics_warningsAsInfo: Vec = vec![], - /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. - imports_granularity_enforce: bool = false, + /// Enforce the import granularity setting for all files. If set to false rust-analyzer will + /// try to keep import styles consistent per file. + imports_granularity_enforce: bool = false, + /// How imports should be grouped into use statements. - imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate, - /// Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines. - imports_group_enable: bool = true, - /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. - imports_merge_glob: bool = true, + imports_granularity_group: ImportGranularityDef = ImportGranularityDef::Crate, + + /// Group inserted imports by the [following + /// order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are + /// separated by newlines. + imports_group_enable: bool = true, + + /// Allow import insertion to merge new imports into single path glob imports like `use + /// std::fmt::*;`. + imports_merge_glob: bool = true, + /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate. imports_preferNoStd | imports_prefer_no_std: bool = false, - /// Whether to prefer import paths containing a `prelude` module. - imports_preferPrelude: bool = false, + + /// Prefer import paths containing a `prelude` module. + imports_preferPrelude: bool = false, + /// The path structure for newly inserted paths to use. 
- imports_prefix: ImportPrefixDef = ImportPrefixDef::ByCrate, - /// Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;". + imports_prefix: ImportPrefixDef = ImportPrefixDef::ByCrate, + + /// Prefix external (including std, core) crate imports with `::`. + /// + /// E.g. `use ::std::io::Read;`. imports_prefixExternPrelude: bool = false, } } @@ -3203,8 +3340,10 @@ fn schema(fields: &[SchemaField]) -> serde_json::Value { .iter() .map(|(field, ty, doc, default)| { let name = field.replace('_', "."); - let category = - name.find('.').map(|end| String::from(&name[..end])).unwrap_or("general".into()); + let category = name + .split_once(".") + .map(|(category, _name)| to_title_case(category)) + .unwrap_or("rust-analyzer".into()); let name = format!("rust-analyzer.{name}"); let props = field_props(field, ty, doc, default); serde_json::json!({ @@ -3218,6 +3357,29 @@ fn schema(fields: &[SchemaField]) -> serde_json::Value { map.into() } +/// Translate a field name to a title case string suitable for use in the category names on the +/// vscode settings page. +/// +/// First letter of word should be uppercase, if an uppercase letter is encountered, add a space +/// before it e.g. "fooBar" -> "Foo Bar", "fooBarBaz" -> "Foo Bar Baz", "foo" -> "Foo" +/// +/// This likely should be in stdx (or just use heck instead), but it doesn't handle any edge cases +/// and is intentionally simple. +fn to_title_case(s: &str) -> String { + let mut result = String::with_capacity(s.len()); + let mut chars = s.chars(); + if let Some(first) = chars.next() { + result.push(first.to_ascii_uppercase()); + for c in chars { + if c.is_uppercase() { + result.push(' '); + } + result.push(c); + } + } + result +} + fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value { let doc = doc_comment_to_string(doc); let doc = doc.trim_end_matches('\n'); diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md index ebac26e1d60a5..05299f1d017ef 100644 --- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md +++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md @@ -2,8 +2,7 @@ Default: `false` -Whether to insert #[must_use] when generating `as_` methods -for enum variants. +Insert #[must_use] when generating `as_` methods for enum variants. ## rust-analyzer.assist.expressionFillDefault {#assist.expressionFillDefault} @@ -17,14 +16,15 @@ Placeholder expression to use for missing expressions in assists. Default: `false` -When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible. +Prefer to use `Self` over the type name when inserting a type (e.g. in "fill match arms" assist). ## rust-analyzer.assist.termSearch.borrowcheck {#assist.termSearch.borrowcheck} Default: `true` -Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check. +Enable borrow checking for term search code assists. If set to false, also there will be +more suggestions, but some of them may not borrow-check. ## rust-analyzer.assist.termSearch.fuel {#assist.termSearch.fuel} @@ -45,7 +45,8 @@ Warm up caches on project load. Default: `"physical"` -How many worker threads to handle priming caches. The default `0` means to pick automatically. +How many worker threads to handle priming caches. The default `0` means to pick +automatically. 
## rust-analyzer.cargo.allTargets {#cargo.allTargets} @@ -358,7 +359,7 @@ check will be performed. Default: `true` -Whether to automatically add a semicolon when completing unit-returning functions. +Automatically add a semicolon when completing unit-returning functions. In `match` arms it completes a comma instead. @@ -367,22 +368,26 @@ In `match` arms it completes a comma instead. Default: `true` -Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future. +Show method calls and field accesses completions with `await` prefixed to them when +completing on a future. ## rust-analyzer.completion.autoIter.enable {#completion.autoIter.enable} Default: `true` -Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them. +Show method call completions with `iter()` or `into_iter()` prefixed to them when +completing on a type that has them. ## rust-analyzer.completion.autoimport.enable {#completion.autoimport.enable} Default: `true` -Toggles the additional completions that automatically add imports when completed. -Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. +Show completions that automatically add imports when completed. + +Note that your client must specify the `additionalTextEdits` LSP client capability to +truly have this feature enabled. ## rust-analyzer.completion.autoimport.exclude {#completion.autoimport.exclude} @@ -406,10 +411,11 @@ A list of full paths to items to exclude from auto-importing completions. Traits in this list won't have their methods suggested in completions unless the trait is in scope. -You can either specify a string path which defaults to type "always" or use the more verbose -form `{ "path": "path::to::item", type: "always" }`. +You can either specify a string path which defaults to type "always" or use the more +verbose form `{ "path": "path::to::item", type: "always" }`. -For traits the type "methods" can be used to only exclude the methods but not the trait itself. +For traits the type "methods" can be used to only exclude the methods but not the trait +itself. This setting also inherits `#rust-analyzer.completion.excludeTraits#`. @@ -418,15 +424,15 @@ This setting also inherits `#rust-analyzer.completion.excludeTraits#`. Default: `true` -Toggles the additional completions that automatically show method calls and field accesses -with `self` prefixed to them when inside a method. +Show method calls and field access completions with `self` prefixed to them when +inside a method. ## rust-analyzer.completion.callable.snippets {#completion.callable.snippets} Default: `"fill_arguments"` -Whether to add parenthesis and argument snippets when completing function. +Add parenthesis and argument snippets when completing function. ## rust-analyzer.completion.excludeTraits {#completion.excludeTraits} @@ -435,7 +441,9 @@ Default: `[]` A list of full paths to traits whose methods to exclude from completion. -Methods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`. +Methods from these traits won't be completed, even if the trait is in scope. However, +they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or +`T where T: Trait`. 
Note that the trait themselves can still be completed. @@ -444,14 +452,15 @@ Note that the trait themselves can still be completed. Default: `false` -Whether to show full function/method signatures in completion docs. +Show full function / method signatures in completion docs. ## rust-analyzer.completion.hideDeprecated {#completion.hideDeprecated} Default: `false` -Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden. +Omit deprecated items from completions. By default they are marked as deprecated but not +hidden. ## rust-analyzer.completion.limit {#completion.limit} @@ -465,14 +474,15 @@ Maximum number of completions to return. If `None`, the limit is infinite. Default: `true` -Whether to show postfix snippets like `dbg`, `if`, `not`, etc. +Show postfix snippets like `dbg`, `if`, `not`, etc. ## rust-analyzer.completion.privateEditable.enable {#completion.privateEditable.enable} Default: `false` -Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. +Show completions of private items and fields that are defined in the current workspace +even if they are not visible at the current position. ## rust-analyzer.completion.snippets.custom {#completion.snippets.custom} @@ -529,7 +539,7 @@ Custom completion snippets. Default: `false` -Whether to enable term search based snippets like `Some(foo.bar().baz())`. +Enable term search based snippets like `Some(foo.bar().baz())`. ## rust-analyzer.completion.termSearch.fuel {#completion.termSearch.fuel} @@ -550,30 +560,30 @@ List of rust-analyzer diagnostics to disable. Default: `true` -Whether to show native rust-analyzer diagnostics. +Show native rust-analyzer diagnostics. ## rust-analyzer.diagnostics.experimental.enable {#diagnostics.experimental.enable} Default: `false` -Whether to show experimental rust-analyzer diagnostics that might -have more false positives than usual. +Show experimental rust-analyzer diagnostics that might have more false positives than +usual. ## rust-analyzer.diagnostics.remapPrefix {#diagnostics.remapPrefix} Default: `{}` -Map of prefixes to be substituted when parsing diagnostic file paths. -This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. +Map of prefixes to be substituted when parsing diagnostic file paths. This should be the +reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. ## rust-analyzer.diagnostics.styleLints.enable {#diagnostics.styleLints.enable} Default: `false` -Whether to run additional style lints. +Run additional style lints. ## rust-analyzer.diagnostics.warningsAsHint {#diagnostics.warningsAsHint} @@ -582,8 +592,8 @@ Default: `[]` List of warnings that should be displayed with hint severity. -The warnings will be indicated by faded text or three dots in code -and will not show up in the `Problems Panel`. +The warnings will be indicated by faded text or three dots in code and will not show up +in the `Problems Panel`. ## rust-analyzer.diagnostics.warningsAsInfo {#diagnostics.warningsAsInfo} @@ -592,17 +602,19 @@ Default: `[]` List of warnings that should be displayed with info severity. -The warnings will be indicated by a blue squiggly underline in code -and a blue icon in the `Problems Panel`. +The warnings will be indicated by a blue squiggly underline in code and a blue icon in +the `Problems Panel`. 
## rust-analyzer.files.exclude {#files.exclude} Default: `[]` -These paths (file/directories) will be ignored by rust-analyzer. They are -relative to the workspace root, and globs are not supported. You may -also need to add the folders to Code's `files.watcherExclude`. +List of files to ignore + +These paths (file/directories) will be ignored by rust-analyzer. They are relative to +the workspace root, and globs are not supported. You may also need to add the folders to +Code's `files.watcherExclude`. ## rust-analyzer.files.watcher {#files.watcher} @@ -616,64 +628,67 @@ Controls file watching implementation. Default: `true` -Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`). +Highlight related return values while the cursor is on any `match`, `if`, or match arm +arrow (`=>`). ## rust-analyzer.highlightRelated.breakPoints.enable {#highlightRelated.breakPoints.enable} Default: `true` -Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords. +Highlight related references while the cursor is on `break`, `loop`, `while`, or `for` +keywords. ## rust-analyzer.highlightRelated.closureCaptures.enable {#highlightRelated.closureCaptures.enable} Default: `true` -Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure. +Highlight all captures of a closure while the cursor is on the `|` or move keyword of a closure. ## rust-analyzer.highlightRelated.exitPoints.enable {#highlightRelated.exitPoints.enable} Default: `true` -Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`). +Highlight all exit points while the cursor is on any `return`, `?`, `fn`, or return type +arrow (`->`). ## rust-analyzer.highlightRelated.references.enable {#highlightRelated.references.enable} Default: `true` -Enables highlighting of related references while the cursor is on any identifier. +Highlight related references while the cursor is on any identifier. ## rust-analyzer.highlightRelated.yieldPoints.enable {#highlightRelated.yieldPoints.enable} Default: `true` -Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords. +Highlight all break points for a loop or block context while the cursor is on any +`async` or `await` keywords. ## rust-analyzer.hover.actions.debug.enable {#hover.actions.debug.enable} Default: `true` -Whether to show `Debug` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `Debug` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. ## rust-analyzer.hover.actions.enable {#hover.actions.enable} Default: `true` -Whether to show HoverActions in Rust files. +Show HoverActions in Rust files. ## rust-analyzer.hover.actions.gotoTypeDef.enable {#hover.actions.gotoTypeDef.enable} Default: `true` -Whether to show `Go to Type Definition` action. Only applies when +Show `Go to Type Definition` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. @@ -681,46 +696,45 @@ Whether to show `Go to Type Definition` action. Only applies when Default: `true` -Whether to show `Implementations` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `Implementations` action. Only applies when `#rust-analyzer.hover.actions.enable#` +is set. 
## rust-analyzer.hover.actions.references.enable {#hover.actions.references.enable} Default: `false` -Whether to show `References` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `References` action. Only applies when `#rust-analyzer.hover.actions.enable#` is +set. ## rust-analyzer.hover.actions.run.enable {#hover.actions.run.enable} Default: `true` -Whether to show `Run` action. Only applies when -`#rust-analyzer.hover.actions.enable#` is set. +Show `Run` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set. ## rust-analyzer.hover.actions.updateTest.enable {#hover.actions.updateTest.enable} Default: `true` -Whether to show `Update Test` action. Only applies when -`#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set. +Show `Update Test` action. Only applies when `#rust-analyzer.hover.actions.enable#` and +`#rust-analyzer.hover.actions.run.enable#` are set. ## rust-analyzer.hover.documentation.enable {#hover.documentation.enable} Default: `true` -Whether to show documentation on hover. +Show documentation on hover. ## rust-analyzer.hover.documentation.keywords.enable {#hover.documentation.keywords.enable} Default: `true` -Whether to show keyword hover popups. Only applies when +Show keyword hover popups. Only applies when `#rust-analyzer.hover.documentation.enable#` is set. @@ -728,7 +742,7 @@ Whether to show keyword hover popups. Only applies when Default: `true` -Whether to show drop glue information on hover. +Show drop glue information on hover. ## rust-analyzer.hover.links.enable {#hover.links.enable} @@ -742,9 +756,11 @@ Use markdown syntax for links on hover. Default: `20` -Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis. +Show what types are used as generic arguments in calls etc. on hover, and limit the max +length to show such types, beyond which they will be shown with ellipsis. -This can take three values: `null` means "unlimited", the string `"hide"` means to not show generic substitutions at all, and a number means to limit them to X characters. +This can take three values: `null` means "unlimited", the string `"hide"` means to not +show generic substitutions at all, and a number means to limit them to X characters. The default is 20 characters. @@ -760,7 +776,7 @@ How to render the align information in a memory layout hover. Default: `true` -Whether to show memory layout data on hover. +Show memory layout data on hover. ## rust-analyzer.hover.memoryLayout.niches {#hover.memoryLayout.niches} @@ -802,7 +818,8 @@ How many variants of an enum to display when hovering on. Show none if empty. Default: `5` -How many fields of a struct, variant or union to display when hovering on. Show none if empty. +How many fields of a struct, variant or union to display when hovering on. Show none if +empty. ## rust-analyzer.hover.show.traitAssocItems {#hover.show.traitAssocItems} @@ -816,7 +833,8 @@ How many associated items of a trait to display when hovering a trait. Default: `false` -Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. +Enforce the import granularity setting for all files. If set to false rust-analyzer will +try to keep import styles consistent per file. 
## rust-analyzer.imports.granularity.group {#imports.granularity.group} @@ -830,14 +848,17 @@ How imports should be grouped into use statements. Default: `true` -Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines. +Group inserted imports by the [following +order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are +separated by newlines. ## rust-analyzer.imports.merge.glob {#imports.merge.glob} Default: `true` -Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. +Allow import insertion to merge new imports into single path glob imports like `use +std::fmt::*;`. ## rust-analyzer.imports.preferNoStd {#imports.preferNoStd} @@ -851,7 +872,7 @@ Prefer to unconditionally use imports of the core and alloc crate, over the std Default: `false` -Whether to prefer import paths containing a `prelude` module. +Prefer import paths containing a `prelude` module. ## rust-analyzer.imports.prefix {#imports.prefix} @@ -865,28 +886,30 @@ The path structure for newly inserted paths to use. Default: `false` -Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;". +Prefix external (including std, core) crate imports with `::`. + +E.g. `use ::std::io::Read;`. ## rust-analyzer.inlayHints.bindingModeHints.enable {#inlayHints.bindingModeHints.enable} Default: `false` -Whether to show inlay type hints for binding modes. +Show inlay type hints for binding modes. ## rust-analyzer.inlayHints.chainingHints.enable {#inlayHints.chainingHints.enable} Default: `true` -Whether to show inlay type hints for method chains. +Show inlay type hints for method chains. ## rust-analyzer.inlayHints.closingBraceHints.enable {#inlayHints.closingBraceHints.enable} Default: `true` -Whether to show inlay hints after a closing `}` to indicate what item it belongs to. +Show inlay hints after a closing `}` to indicate what item it belongs to. ## rust-analyzer.inlayHints.closingBraceHints.minLines {#inlayHints.closingBraceHints.minLines} @@ -901,14 +924,14 @@ to always show them). Default: `false` -Whether to show inlay hints for closure captures. +Show inlay hints for closure captures. ## rust-analyzer.inlayHints.closureReturnTypeHints.enable {#inlayHints.closureReturnTypeHints.enable} Default: `"never"` -Whether to show inlay type hints for return types of closures. +Show inlay type hints for return types of closures. ## rust-analyzer.inlayHints.closureStyle {#inlayHints.closureStyle} @@ -922,77 +945,77 @@ Closure notation in type and chaining inlay hints. Default: `"never"` -Whether to show enum variant discriminant hints. +Show enum variant discriminant hints. ## rust-analyzer.inlayHints.expressionAdjustmentHints.enable {#inlayHints.expressionAdjustmentHints.enable} Default: `"never"` -Whether to show inlay hints for type adjustments. +Show inlay hints for type adjustments. ## rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe {#inlayHints.expressionAdjustmentHints.hideOutsideUnsafe} Default: `false` -Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. +Hide inlay hints for type adjustments outside of `unsafe` blocks. ## rust-analyzer.inlayHints.expressionAdjustmentHints.mode {#inlayHints.expressionAdjustmentHints.mode} Default: `"prefix"` -Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). +Show inlay hints as postfix ops (`.*` instead of `*`, etc). 
## rust-analyzer.inlayHints.genericParameterHints.const.enable {#inlayHints.genericParameterHints.const.enable} Default: `true` -Whether to show const generic parameter name inlay hints. +Show const generic parameter name inlay hints. ## rust-analyzer.inlayHints.genericParameterHints.lifetime.enable {#inlayHints.genericParameterHints.lifetime.enable} Default: `false` -Whether to show generic lifetime parameter name inlay hints. +Show generic lifetime parameter name inlay hints. ## rust-analyzer.inlayHints.genericParameterHints.type.enable {#inlayHints.genericParameterHints.type.enable} Default: `false` -Whether to show generic type parameter name inlay hints. +Show generic type parameter name inlay hints. ## rust-analyzer.inlayHints.implicitDrops.enable {#inlayHints.implicitDrops.enable} Default: `false` -Whether to show implicit drop hints. +Show implicit drop hints. ## rust-analyzer.inlayHints.implicitSizedBoundHints.enable {#inlayHints.implicitSizedBoundHints.enable} Default: `false` -Whether to show inlay hints for the implied type parameter `Sized` bound. +Show inlay hints for the implied type parameter `Sized` bound. ## rust-analyzer.inlayHints.lifetimeElisionHints.enable {#inlayHints.lifetimeElisionHints.enable} Default: `"never"` -Whether to show inlay type hints for elided lifetimes in function signatures. +Show inlay type hints for elided lifetimes in function signatures. ## rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames {#inlayHints.lifetimeElisionHints.useParameterNames} Default: `false` -Whether to prefer using parameter names as the name for elided lifetime hints if possible. +Prefer using parameter names as the name for elided lifetime hints if possible. ## rust-analyzer.inlayHints.maxLength {#inlayHints.maxLength} @@ -1006,23 +1029,24 @@ Maximum length for inlay hints. Set to null to have an unlimited length. Default: `true` -Whether to show function parameter name inlay hints at the call -site. +Show function parameter name inlay hints at the call site. ## rust-analyzer.inlayHints.rangeExclusiveHints.enable {#inlayHints.rangeExclusiveHints.enable} Default: `false` -Whether to show exclusive range inlay hints. +Show exclusive range inlay hints. ## rust-analyzer.inlayHints.reborrowHints.enable {#inlayHints.reborrowHints.enable} Default: `"never"` -Whether to show inlay hints for compiler inserted reborrows. -This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. +Show inlay hints for compiler inserted reborrows. + +This setting is deprecated in favor of +#rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. ## rust-analyzer.inlayHints.renderColons {#inlayHints.renderColons} @@ -1036,36 +1060,38 @@ Whether to render leading colons for type hints, and trailing colons for paramet Default: `true` -Whether to show inlay type hints for variables. +Show inlay type hints for variables. ## rust-analyzer.inlayHints.typeHints.hideClosureInitialization {#inlayHints.typeHints.hideClosureInitialization} Default: `false` -Whether to hide inlay type hints for `let` statements that initialize to a closure. -Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. +Hide inlay type hints for `let` statements that initialize to a closure. + +Only applies to closures with blocks, same as +`#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`. 
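A minimal sketch combining the boolean hint toggles documented above, expressed as client `settings.json` (values chosen purely for illustration):

```json
{
    "rust-analyzer.inlayHints.typeHints.enable": true,
    "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": true,
    "rust-analyzer.inlayHints.genericParameterHints.lifetime.enable": true
}
```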
## rust-analyzer.inlayHints.typeHints.hideClosureParameter {#inlayHints.typeHints.hideClosureParameter} Default: `false` -Whether to hide inlay parameter type hints for closures. +Hide inlay parameter type hints for closures. ## rust-analyzer.inlayHints.typeHints.hideNamedConstructor {#inlayHints.typeHints.hideNamedConstructor} Default: `false` -Whether to hide inlay type hints for constructors. +Hide inlay type hints for constructors. ## rust-analyzer.interpret.tests {#interpret.tests} Default: `false` -Enables the experimental support for interpreting tests. +Enable the experimental support for interpreting tests. ## rust-analyzer.joinLines.joinAssignments {#joinLines.joinAssignments} @@ -1100,23 +1126,21 @@ Join lines unwraps trivial blocks. Default: `true` -Whether to show `Debug` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.enable {#lens.enable} Default: `true` -Whether to show CodeLens in Rust files. +Show CodeLens in Rust files. ## rust-analyzer.lens.implementations.enable {#lens.implementations.enable} Default: `true` -Whether to show `Implementations` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.location {#lens.location} @@ -1130,60 +1154,56 @@ Where to render annotations. Default: `false` -Whether to show `References` lens for Struct, Enum, and Union. -Only applies when `#rust-analyzer.lens.enable#` is set. +Show `References` lens for Struct, Enum, and Union. Only applies when +`#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.references.enumVariant.enable {#lens.references.enumVariant.enable} Default: `false` -Whether to show `References` lens for Enum Variants. -Only applies when `#rust-analyzer.lens.enable#` is set. +Show `References` lens for Enum Variants. Only applies when +`#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.references.method.enable {#lens.references.method.enable} Default: `false` -Whether to show `Method References` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.references.trait.enable {#lens.references.trait.enable} Default: `false` -Whether to show `References` lens for Trait. -Only applies when `#rust-analyzer.lens.enable#` is set. +Show `References` lens for Trait. Only applies when `#rust-analyzer.lens.enable#` is +set. ## rust-analyzer.lens.run.enable {#lens.run.enable} Default: `true` -Whether to show `Run` lens. Only applies when -`#rust-analyzer.lens.enable#` is set. +Show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set. ## rust-analyzer.lens.updateTest.enable {#lens.updateTest.enable} Default: `true` -Whether to show `Update Test` lens. Only applies when -`#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set. +Show `Update Test` lens. Only applies when `#rust-analyzer.lens.enable#` and +`#rust-analyzer.lens.run.enable#` are set. ## rust-analyzer.linkedProjects {#linkedProjects} Default: `[]` -Disable project auto-discovery in favor of explicitly specified set -of projects. +Disable project auto-discovery in favor of explicitly specified set of projects. -Elements must be paths pointing to `Cargo.toml`, -`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON -objects in `rust-project.json` format. 
+Elements must be paths pointing to `Cargo.toml`, `rust-project.json`, `.rs` files (which +will be treated as standalone files) or JSON objects in `rust-project.json` format. ## rust-analyzer.lru.capacity {#lru.capacity} @@ -1197,21 +1217,22 @@ Number of syntax trees rust-analyzer keeps in memory. Defaults to 128. Default: `{}` -Sets the LRU capacity of the specified queries. +The LRU capacity of the specified queries. ## rust-analyzer.notifications.cargoTomlNotFound {#notifications.cargoTomlNotFound} Default: `true` -Whether to show `can't find Cargo.toml` error message. +Show `can't find Cargo.toml` error message. ## rust-analyzer.numThreads {#numThreads} Default: `null` -How many worker threads in the main loop. The default `null` means to pick automatically. +The number of worker threads in the main loop. The default `null` means to pick +automatically. ## rust-analyzer.procMacro.attributes.enable {#procMacro.attributes.enable} @@ -1346,7 +1367,10 @@ doc links. Default: `true` -Whether the server is allowed to emit non-standard tokens and modifiers. +Emit non-standard tokens and modifiers + +When enabled, rust-analyzer will emit tokens and modifiers that are not part of the +standard set of semantic tokens. ## rust-analyzer.semanticHighlighting.operator.enable {#semanticHighlighting.operator.enable} @@ -1427,11 +1451,15 @@ Show documentation. Default: `"=."` Specify the characters allowed to invoke special on typing triggers. -- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression + +- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing + expression - typing `=` between two expressions adds `;` when in statement position -- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position +- typing `=` to turn an assignment into an equality comparison removes `;` when in + expression position - typing `.` in a chain method call auto-indents -- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression +- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the + expression - typing `{` in a use item adds a closing `}` in the right place - typing `>` to complete a return type `->` will insert a whitespace after it - typing `<` in a path or type position inserts a closing `>` after the path or type. @@ -1475,8 +1503,8 @@ Below is an example of a valid configuration: **Warning**: This format is provisional and subject to change. -[`DiscoverWorkspaceConfig::command`] *must* return a JSON object -corresponding to `DiscoverProjectData::Finished`: +[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to +`DiscoverProjectData::Finished`: ```norun #[derive(Debug, Clone, Deserialize, Serialize)] @@ -1506,12 +1534,11 @@ As JSON, `DiscoverProjectData::Finished` is: } ``` -It is encouraged, but not required, to use the other variants on -`DiscoverProjectData` to provide a more polished end-user experience. +It is encouraged, but not required, to use the other variants on `DiscoverProjectData` +to provide a more polished end-user experience. 
-`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, -which will be substituted with the JSON-serialized form of the following -enum: +`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be +substituted with the JSON-serialized form of the following enum: ```norun #[derive(PartialEq, Clone, Debug, Serialize)] @@ -1538,11 +1565,10 @@ Similarly, the JSON representation of `DiscoverArgument::Buildfile` is: } ``` -`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, -and therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to -to update an existing workspace. As a reference for implementors, -buck2's `rust-project` will likely be useful: -https://github.com/facebook/buck2/tree/main/integrations/rust-project. +`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and +therefore, a workspace, whereas `DiscoverArgument::Buildfile` is used to update an +existing workspace. As a reference for implementors, buck2's `rust-project` will likely +be useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project. ## rust-analyzer.workspace.symbol.search.excludeImports {#workspace.symbol.search.excludeImports} diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 3cb4c21ee1fb2..8953a30dacb2c 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -354,35 +354,122 @@ ], "configuration": [ { - "title": "general", + "title": "Rust Analyzer" + }, + { + "title": "Assist" + }, + { + "title": "Cache Priming" + }, + { + "title": "Cargo" + }, + { + "title": "Cfg" + }, + { + "title": "Check" + }, + { + "title": "Completion" + }, + { + "title": "Debug" + }, + { + "title": "Diagnostics" + }, + { + "title": "Files" + }, + { + "title": "Highlight Related" + }, + { + "title": "Hover" + }, + { + "title": "Imports" + }, + { + "title": "Inlay Hints" + }, + { + "title": "Interpret" + }, + { + "title": "Join Lines" + }, + { + "title": "Lens" + }, + { + "title": "Lru" + }, + { + "title": "Notifications" + }, + { + "title": "Proc Macro" + }, + { + "title": "References" + }, + { + "title": "Runnables" + }, + { + "title": "Rustc" + }, + { + "title": "Rustfmt" + }, + { + "title": "Semantic Highlighting" + }, + { + "title": "Signature Info" + }, + { + "title": "Typing" + }, + { + "title": "Vfs" + }, + { + "title": "Workspace" + }, + { + "title": "rust-analyzer", "properties": { "rust-analyzer.restartServerOnConfigChange": { - "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.", + "description": "Restart the server automatically when settings that require a restart are changed.", "default": false, "type": "boolean" }, "rust-analyzer.showUnlinkedFileNotification": { - "markdownDescription": "Whether to show a notification for unlinked files asking the user to add the corresponding Cargo.toml to the linked projects setting.", + "description": "Show a notification for unlinked files, prompting the user to add the corresponding Cargo.toml to the linked projects setting.", "default": true, "type": "boolean" }, "rust-analyzer.showRequestFailedErrorNotification": { - "markdownDescription": "Whether to show error notifications for failing requests.", + "description": "Show error notifications when requests fail.", "default": true, "type": "boolean" }, "rust-analyzer.showDependenciesExplorer": { -
"markdownDescription": "Whether to show the dependencies view.", + "description": "Show Rust Dependencies in the Explorer view.", "default": true, "type": "boolean" }, "rust-analyzer.showSyntaxTree": { - "markdownDescription": "Whether to show the syntax tree view.", + "description": "Show Syntax Tree in the Explorer view.", "default": false, "type": "boolean" }, "rust-analyzer.testExplorer": { - "markdownDescription": "Whether to show the test explorer.", + "description": "Show the Test Explorer view.", "default": false, "type": "boolean" }, @@ -394,7 +481,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.extraEnv": { "anyOf": [ @@ -452,7 +539,7 @@ } }, { - "title": "statusBar", + "title": "Status Bar", "properties": { "rust-analyzer.statusBar.clickAction": { "type": "string", @@ -524,7 +611,7 @@ } }, { - "title": "server", + "title": "Server", "properties": { "rust-analyzer.server.path": { "type": [ @@ -553,7 +640,7 @@ } }, { - "title": "trace", + "title": "Trace", "properties": { "rust-analyzer.trace.server": { "type": "string", @@ -580,7 +667,7 @@ } }, { - "title": "debug", + "title": "Debug", "properties": { "rust-analyzer.debug.engine": { "type": "string", @@ -625,7 +712,7 @@ } }, { - "title": "typing", + "title": "Typing", "properties": { "rust-analyzer.typing.continueCommentsOnNewline": { "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.", @@ -635,7 +722,7 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.previewRustcOutput": { "markdownDescription": "Whether to show the main part of the rendered rustc output of a diagnostic message.", @@ -653,17 +740,17 @@ "title": "$generated-start" }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.emitMustUse": { - "markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.", + "markdownDescription": "Insert #[must_use] when generating `as_` methods for enum variants.", "default": false, "type": "boolean" } } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.expressionFillDefault": { "markdownDescription": "Placeholder expression to use for missing expressions in assists.", @@ -681,27 +768,27 @@ } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.preferSelf": { - "markdownDescription": "When inserting a type (e.g. in \"fill match arms\" assist), prefer to use `Self` over the type name where possible.", + "markdownDescription": "Prefer to use `Self` over the type name when inserting a type (e.g. in \"fill match arms\" assist).", "default": false, "type": "boolean" } } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.termSearch.borrowcheck": { - "markdownDescription": "Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.", + "markdownDescription": "Enable borrow checking for term search code assists. 
If set to false, also there will be\nmore suggestions, but some of them may not borrow-check.", "default": true, "type": "boolean" } } }, { - "title": "assist", + "title": "Assist", "properties": { "rust-analyzer.assist.termSearch.fuel": { "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 1800).", @@ -712,7 +799,7 @@ } }, { - "title": "cachePriming", + "title": "Cache Priming", "properties": { "rust-analyzer.cachePriming.enable": { "markdownDescription": "Warm up caches on project load.", @@ -722,10 +809,10 @@ } }, { - "title": "cachePriming", + "title": "Cache Priming", "properties": { "rust-analyzer.cachePriming.numThreads": { - "markdownDescription": "How many worker threads to handle priming caches. The default `0` means to pick automatically.", + "markdownDescription": "How many worker threads to handle priming caches. The default `0` means to pick\nautomatically.", "default": "physical", "anyOf": [ { @@ -749,7 +836,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.allTargets": { "markdownDescription": "Pass `--all-targets` to cargo invocation.", @@ -759,7 +846,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.autoreload": { "markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.", @@ -769,7 +856,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.enable": { "markdownDescription": "Run build scripts (`build.rs`) for more precise code analysis.", @@ -779,7 +866,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.invocationStrategy": { "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each Rust workspace with the\nworkspace as the working directory.\nIf `once` is set, the command will be executed once with the opened project as the\nworking directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", @@ -797,7 +884,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.overrideCommand": { "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). 
This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n.", @@ -813,7 +900,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.rebuildOnSave": { "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.", @@ -823,7 +910,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.useRustcWrapper": { "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.", @@ -833,7 +920,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.cfgs": { "markdownDescription": "List of cfg options to enable with the given values.\n\nTo enable a name without a value, use `\"key\"`.\nTo enable a name with a value, use `\"key=value\"`.\nTo disable, prefix the entry with a `!`.", @@ -849,7 +936,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.extraArgs": { "markdownDescription": "Extra arguments that are passed to every cargo invocation.", @@ -862,7 +949,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.extraEnv": { "markdownDescription": "Extra environment variables that will be set when running cargo, rustc\nor other commands within the workspace. Useful for setting RUSTFLAGS.", @@ -872,7 +959,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.features": { "markdownDescription": "List of features to activate.\n\nSet this to `\"all\"` to pass `--all-features` to cargo.", @@ -898,7 +985,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.noDefaultFeatures": { "markdownDescription": "Whether to pass `--no-default-features` to cargo.", @@ -908,7 +995,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.noDeps": { "markdownDescription": "Whether to skip fetching dependencies. If set to \"true\", the analysis is performed\nentirely offline, and Cargo metadata for dependencies is not fetched.", @@ -918,7 +1005,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.sysroot": { "markdownDescription": "Relative path to the sysroot, or \"discover\" to try to automatically find it via\n\"rustc --print sysroot\".\n\nUnsetting this disables sysroot loading.\n\nThis option does not take effect until rust-analyzer is restarted.", @@ -931,7 +1018,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.sysrootSrc": { "markdownDescription": "Relative path to the sysroot library sources. 
If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.", @@ -944,7 +1031,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.target": { "markdownDescription": "Compilation target override (target tuple).", @@ -957,7 +1044,7 @@ } }, { - "title": "cargo", + "title": "Cargo", "properties": { "rust-analyzer.cargo.targetDir": { "markdownDescription": "Optional path to a rust-analyzer specific target directory.\nThis prevents rust-analyzer's `cargo check` and initial build-script and proc-macro\nbuilding from locking the `Cargo.lock` at the expense of duplicating build artifacts.\n\nSet to `true` to use a subdirectory of the existing target directory or\nset to a path relative to the workspace to use that path.", @@ -977,7 +1064,7 @@ } }, { - "title": "cfg", + "title": "Cfg", "properties": { "rust-analyzer.cfg.setTest": { "markdownDescription": "Set `cfg(test)` for local crates. Defaults to true.", @@ -987,7 +1074,7 @@ } }, { - "title": "general", + "title": "rust-analyzer", "properties": { "rust-analyzer.checkOnSave": { "markdownDescription": "Run the check command for diagnostics on save.", @@ -997,7 +1084,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.allTargets": { "markdownDescription": "Check all targets and tests (`--all-targets`). Defaults to\n`#rust-analyzer.cargo.allTargets#`.", @@ -1010,7 +1097,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.command": { "markdownDescription": "Cargo command to use for `cargo check`.", @@ -1020,7 +1107,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.extraArgs": { "markdownDescription": "Extra arguments for `cargo check`.", @@ -1033,7 +1120,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.extraEnv": { "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.", @@ -1043,7 +1130,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.features": { "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.", @@ -1072,7 +1159,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.ignore": { "markdownDescription": "List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.\n\nFor example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...", @@ -1086,7 +1173,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.invocationStrategy": { "markdownDescription": "Specifies the invocation strategy to use when running the check command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.", @@ -1104,7 +1191,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.noDefaultFeatures": { "markdownDescription": "Whether to pass `--no-default-features` to Cargo. 
Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", @@ -1117,7 +1204,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.overrideCommand": { "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", @@ -1133,7 +1220,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.targets": { "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", @@ -1156,7 +1243,7 @@ } }, { - "title": "check", + "title": "Check", "properties": { "rust-analyzer.check.workspace": { "markdownDescription": "Whether `--workspace` should be passed to `cargo check`.\nIf false, `-p ` will be passed instead if applicable. 
In case it is not, no\ncheck will be performed.", @@ -1166,50 +1253,50 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.addSemicolonToUnit": { - "markdownDescription": "Whether to automatically add a semicolon when completing unit-returning functions.\n\nIn `match` arms it completes a comma instead.", + "markdownDescription": "Automatically add a semicolon when completing unit-returning functions.\n\nIn `match` arms it completes a comma instead.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoAwait.enable": { - "markdownDescription": "Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future.", + "markdownDescription": "Show method calls and field accesses completions with `await` prefixed to them when\ncompleting on a future.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoIter.enable": { - "markdownDescription": "Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them.", + "markdownDescription": "Show method call completions with `iter()` or `into_iter()` prefixed to them when\ncompleting on a type that has them.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoimport.enable": { - "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.", + "markdownDescription": "Show completions that automatically add imports when completed.\n\nNote that your client must specify the `additionalTextEdits` LSP client capability to\ntruly have this feature enabled.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoimport.exclude": { - "markdownDescription": "A list of full paths to items to exclude from auto-importing completions.\n\nTraits in this list won't have their methods suggested in completions unless the trait\nis in scope.\n\nYou can either specify a string path which defaults to type \"always\" or use the more verbose\nform `{ \"path\": \"path::to::item\", type: \"always\" }`.\n\nFor traits the type \"methods\" can be used to only exclude the methods but not the trait itself.\n\nThis setting also inherits `#rust-analyzer.completion.excludeTraits#`.", + "markdownDescription": "A list of full paths to items to exclude from auto-importing completions.\n\nTraits in this list won't have their methods suggested in completions unless the trait\nis in scope.\n\nYou can either specify a string path which defaults to type \"always\" or use the more\nverbose form `{ \"path\": \"path::to::item\", type: \"always\" }`.\n\nFor traits the type \"methods\" can be used to only exclude the methods but not the trait\nitself.\n\nThis setting also inherits `#rust-analyzer.completion.excludeTraits#`.", "default": [ { "path": "core::borrow::Borrow", @@ -1251,20 +1338,20 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.autoself.enable": { - "markdownDescription": "Toggles the additional 
completions that automatically show method calls and field accesses\nwith `self` prefixed to them when inside a method.", + "markdownDescription": "Show method calls and field access completions with `self` prefixed to them when\ninside a method.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.callable.snippets": { - "markdownDescription": "Whether to add parenthesis and argument snippets when completing function.", + "markdownDescription": "Add parenthesis and argument snippets when completing function.", "default": "fill_arguments", "type": "string", "enum": [ @@ -1281,10 +1368,10 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.excludeTraits": { - "markdownDescription": "A list of full paths to traits whose methods to exclude from completion.\n\nMethods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`.\n\nNote that the trait themselves can still be completed.", + "markdownDescription": "A list of full paths to traits whose methods to exclude from completion.\n\nMethods from these traits won't be completed, even if the trait is in scope. However,\nthey will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or\n`T where T: Trait`.\n\nNote that the trait themselves can still be completed.", "default": [], "type": "array", "items": { @@ -1294,27 +1381,27 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.fullFunctionSignatures.enable": { - "markdownDescription": "Whether to show full function/method signatures in completion docs.", + "markdownDescription": "Show full function / method signatures in completion docs.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.hideDeprecated": { - "markdownDescription": "Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden.", + "markdownDescription": "Omit deprecated items from completions. By default they are marked as deprecated but not\nhidden.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.limit": { "markdownDescription": "Maximum number of completions to return. 
If `None`, the limit is infinite.", @@ -1328,27 +1415,27 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.postfix.enable": { - "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.", + "markdownDescription": "Show postfix snippets like `dbg`, `if`, `not`, etc.", "default": true, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.privateEditable.enable": { - "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.", + "markdownDescription": "Show completions of private items and fields that are defined in the current workspace\neven if they are not visible at the current position.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.snippets.custom": { "markdownDescription": "Custom completion snippets.", @@ -1398,17 +1485,17 @@ } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.termSearch.enable": { - "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.", + "markdownDescription": "Enable term search based snippets like `Some(foo.bar().baz())`.", "default": false, "type": "boolean" } } }, { - "title": "completion", + "title": "Completion", "properties": { "rust-analyzer.completion.termSearch.fuel": { "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 1000).", @@ -1419,7 +1506,7 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.disabled": { "markdownDescription": "List of rust-analyzer diagnostics to disable.", @@ -1433,50 +1520,50 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.enable": { - "markdownDescription": "Whether to show native rust-analyzer diagnostics.", + "markdownDescription": "Show native rust-analyzer diagnostics.", "default": true, "type": "boolean" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.experimental.enable": { - "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might\nhave more false positives than usual.", + "markdownDescription": "Show experimental rust-analyzer diagnostics that might have more false positives than\nusual.", "default": false, "type": "boolean" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.remapPrefix": { - "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.", + "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths. 
This should be the\nreverse mapping of what is passed to `rustc` as `--remap-path-prefix`.", "default": {}, "type": "object" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.styleLints.enable": { - "markdownDescription": "Whether to run additional style lints.", + "markdownDescription": "Run additional style lints.", "default": false, "type": "boolean" } } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.warningsAsHint": { - "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.", + "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code and will not show up\nin the `Problems Panel`.", "default": [], "type": "array", "items": { @@ -1486,10 +1573,10 @@ } }, { - "title": "diagnostics", + "title": "Diagnostics", "properties": { "rust-analyzer.diagnostics.warningsAsInfo": { - "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.", + "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in\nthe `Problems Panel`.", "default": [], "type": "array", "items": { @@ -1499,10 +1586,10 @@ } }, { - "title": "files", + "title": "Files", "properties": { "rust-analyzer.files.exclude": { - "markdownDescription": "These paths (file/directories) will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.", + "markdownDescription": "List of files to ignore\n\nThese paths (file/directories) will be ignored by rust-analyzer. They are relative to\nthe workspace root, and globs are not supported. 
You may also need to add the folders to\nCode's `files.watcherExclude`.", "default": [], "type": "array", "items": { @@ -1512,7 +1599,7 @@ } }, { - "title": "files", + "title": "Files", "properties": { "rust-analyzer.files.watcher": { "markdownDescription": "Controls file watching implementation.", @@ -1530,167 +1617,167 @@ } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.branchExitPoints.enable": { - "markdownDescription": "Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).", + "markdownDescription": "Highlight related return values while the cursor is on any `match`, `if`, or match arm\narrow (`=>`).", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.breakPoints.enable": { - "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.", + "markdownDescription": "Highlight related references while the cursor is on `break`, `loop`, `while`, or `for`\nkeywords.", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.closureCaptures.enable": { - "markdownDescription": "Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.", + "markdownDescription": "Highlight all captures of a closure while the cursor is on the `|` or move keyword of a closure.", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.exitPoints.enable": { - "markdownDescription": "Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).", + "markdownDescription": "Highlight all exit points while the cursor is on any `return`, `?`, `fn`, or return type\narrow (`->`).", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.references.enable": { - "markdownDescription": "Enables highlighting of related references while the cursor is on any identifier.", + "markdownDescription": "Highlight related references while the cursor is on any identifier.", "default": true, "type": "boolean" } } }, { - "title": "highlightRelated", + "title": "Highlight Related", "properties": { "rust-analyzer.highlightRelated.yieldPoints.enable": { - "markdownDescription": "Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.", + "markdownDescription": "Highlight all break points for a loop or block context while the cursor is on any\n`async` or `await` keywords.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.debug.enable": { - "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Debug` action. 
Only applies when `#rust-analyzer.hover.actions.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.enable": { - "markdownDescription": "Whether to show HoverActions in Rust files.", + "markdownDescription": "Show HoverActions in Rust files.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.gotoTypeDef.enable": { - "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.implementations.enable": { - "markdownDescription": "Whether to show `Implementations` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Implementations` action. Only applies when `#rust-analyzer.hover.actions.enable#`\nis set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.references.enable": { - "markdownDescription": "Whether to show `References` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `References` action. Only applies when `#rust-analyzer.hover.actions.enable#` is\nset.", "default": false, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.run.enable": { - "markdownDescription": "Whether to show `Run` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.", + "markdownDescription": "Show `Run` action. Only applies when `#rust-analyzer.hover.actions.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.actions.updateTest.enable": { - "markdownDescription": "Whether to show `Update Test` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set.", + "markdownDescription": "Show `Update Test` action. Only applies when `#rust-analyzer.hover.actions.enable#` and\n`#rust-analyzer.hover.actions.run.enable#` are set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.documentation.enable": { - "markdownDescription": "Whether to show documentation on hover.", + "markdownDescription": "Show documentation on hover.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.documentation.keywords.enable": { - "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.", + "markdownDescription": "Show keyword hover popups. 
Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.dropGlue.enable": { - "markdownDescription": "Whether to show drop glue information on hover.", + "markdownDescription": "Show drop glue information on hover.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.links.enable": { "markdownDescription": "Use markdown syntax for links on hover.", @@ -1700,10 +1787,10 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.maxSubstitutionLength": { - "markdownDescription": "Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis.\n\nThis can take three values: `null` means \"unlimited\", the string `\"hide\"` means to not show generic substitutions at all, and a number means to limit them to X characters.\n\nThe default is 20 characters.", + "markdownDescription": "Show what types are used as generic arguments in calls etc. on hover, and limit the max\nlength to show such types, beyond which they will be shown with ellipsis.\n\nThis can take three values: `null` means \"unlimited\", the string `\"hide\"` means to not\nshow generic substitutions at all, and a number means to limit them to X characters.\n\nThe default is 20 characters.", "default": 20, "anyOf": [ { @@ -1723,7 +1810,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.alignment": { "markdownDescription": "How to render the align information in a memory layout hover.", @@ -1750,17 +1837,17 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.enable": { - "markdownDescription": "Whether to show memory layout data on hover.", + "markdownDescription": "Show memory layout data on hover.", "default": true, "type": "boolean" } } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.niches": { "markdownDescription": "How to render the niche information in a memory layout hover.", @@ -1773,7 +1860,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.offset": { "markdownDescription": "How to render the offset information in a memory layout hover.", @@ -1800,7 +1887,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.padding": { "markdownDescription": "How to render the padding information in a memory layout hover.", @@ -1827,7 +1914,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.memoryLayout.size": { "markdownDescription": "How to render the size information in a memory layout hover.", @@ -1854,7 +1941,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.show.enumVariants": { "markdownDescription": "How many variants of an enum to display when hovering on. Show none if empty.", @@ -1868,10 +1955,10 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.show.fields": { - "markdownDescription": "How many fields of a struct, variant or union to display when hovering on. Show none if empty.", + "markdownDescription": "How many fields of a struct, variant or union to display when hovering on. 
Show none if\nempty.", "default": 5, "type": [ "null", @@ -1882,7 +1969,7 @@ } }, { - "title": "hover", + "title": "Hover", "properties": { "rust-analyzer.hover.show.traitAssocItems": { "markdownDescription": "How many associated items of a trait to display when hovering a trait.", @@ -1896,17 +1983,17 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.granularity.enforce": { - "markdownDescription": "Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.", + "markdownDescription": "Enforce the import granularity setting for all files. If set to false rust-analyzer will\ntry to keep import styles consistent per file.", "default": false, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.granularity.group": { "markdownDescription": "How imports should be grouped into use statements.", @@ -1930,27 +2017,27 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.group.enable": { - "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines.", + "markdownDescription": "Group inserted imports by the [following\norder](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are\nseparated by newlines.", "default": true, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.merge.glob": { - "markdownDescription": "Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.", + "markdownDescription": "Allow import insertion to merge new imports into single path glob imports like `use\nstd::fmt::*;`.", "default": true, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.preferNoStd": { "markdownDescription": "Prefer to unconditionally use imports of the core and alloc crate, over the std crate.", @@ -1960,17 +2047,17 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.preferPrelude": { - "markdownDescription": "Whether to prefer import paths containing a `prelude` module.", + "markdownDescription": "Prefer import paths containing a `prelude` module.", "default": false, "type": "boolean" } } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.prefix": { "markdownDescription": "The path structure for newly inserted paths to use.", @@ -1990,47 +2077,47 @@ } }, { - "title": "imports", + "title": "Imports", "properties": { "rust-analyzer.imports.prefixExternPrelude": { - "markdownDescription": "Whether to prefix external (including std, core) crate imports with `::`. e.g. \"use ::std::io::Read;\".", + "markdownDescription": "Prefix external (including std, core) crate imports with `::`.\n\nE.g. 
`use ::std::io::Read;`.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.bindingModeHints.enable": { - "markdownDescription": "Whether to show inlay type hints for binding modes.", + "markdownDescription": "Show inlay type hints for binding modes.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.chainingHints.enable": { - "markdownDescription": "Whether to show inlay type hints for method chains.", + "markdownDescription": "Show inlay type hints for method chains.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closingBraceHints.enable": { - "markdownDescription": "Whether to show inlay hints after a closing `}` to indicate what item it belongs to.", + "markdownDescription": "Show inlay hints after a closing `}` to indicate what item it belongs to.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closingBraceHints.minLines": { "markdownDescription": "Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1\nto always show them).", @@ -2041,20 +2128,20 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closureCaptureHints.enable": { - "markdownDescription": "Whether to show inlay hints for closure captures.", + "markdownDescription": "Show inlay hints for closure captures.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closureReturnTypeHints.enable": { - "markdownDescription": "Whether to show inlay type hints for return types of closures.", + "markdownDescription": "Show inlay type hints for return types of closures.", "default": "never", "type": "string", "enum": [ @@ -2071,7 +2158,7 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.closureStyle": { "markdownDescription": "Closure notation in type and chaining inlay hints.", @@ -2093,10 +2180,10 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.discriminantHints.enable": { - "markdownDescription": "Whether to show enum variant discriminant hints.", + "markdownDescription": "Show enum variant discriminant hints.", "default": "never", "type": "string", "enum": [ @@ -2113,10 +2200,10 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.expressionAdjustmentHints.enable": { - "markdownDescription": "Whether to show inlay hints for type adjustments.", + "markdownDescription": "Show inlay hints for type adjustments.", "default": "never", "type": "string", "enum": [ @@ -2133,20 +2220,20 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe": { - "markdownDescription": "Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.", + "markdownDescription": "Hide inlay hints for type adjustments outside of `unsafe` blocks.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.expressionAdjustmentHints.mode": { - "markdownDescription": "Whether to show inlay hints as postfix 
ops (`.*` instead of `*`, etc).", + "markdownDescription": "Show inlay hints as postfix ops (`.*` instead of `*`, etc).", "default": "prefix", "type": "string", "enum": [ @@ -2165,60 +2252,60 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.genericParameterHints.const.enable": { - "markdownDescription": "Whether to show const generic parameter name inlay hints.", + "markdownDescription": "Show const generic parameter name inlay hints.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.genericParameterHints.lifetime.enable": { - "markdownDescription": "Whether to show generic lifetime parameter name inlay hints.", + "markdownDescription": "Show generic lifetime parameter name inlay hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.genericParameterHints.type.enable": { - "markdownDescription": "Whether to show generic type parameter name inlay hints.", + "markdownDescription": "Show generic type parameter name inlay hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.implicitDrops.enable": { - "markdownDescription": "Whether to show implicit drop hints.", + "markdownDescription": "Show implicit drop hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.implicitSizedBoundHints.enable": { - "markdownDescription": "Whether to show inlay hints for the implied type parameter `Sized` bound.", + "markdownDescription": "Show inlay hints for the implied type parameter `Sized` bound.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.lifetimeElisionHints.enable": { - "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.", + "markdownDescription": "Show inlay type hints for elided lifetimes in function signatures.", "default": "never", "type": "string", "enum": [ @@ -2235,17 +2322,17 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames": { - "markdownDescription": "Whether to prefer using parameter names as the name for elided lifetime hints if possible.", + "markdownDescription": "Prefer using parameter names as the name for elided lifetime hints if possible.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.maxLength": { "markdownDescription": "Maximum length for inlay hints. 
Set to null to have an unlimited length.", @@ -2259,30 +2346,30 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.parameterHints.enable": { - "markdownDescription": "Whether to show function parameter name inlay hints at the call\nsite.", + "markdownDescription": "Show function parameter name inlay hints at the call site.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.rangeExclusiveHints.enable": { - "markdownDescription": "Whether to show exclusive range inlay hints.", + "markdownDescription": "Show exclusive range inlay hints.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.reborrowHints.enable": { - "markdownDescription": "Whether to show inlay hints for compiler inserted reborrows.\nThis setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.", + "markdownDescription": "Show inlay hints for compiler inserted reborrows.\n\nThis setting is deprecated in favor of\n#rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.", "default": "never", "type": "string", "enum": [ @@ -2299,7 +2386,7 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.renderColons": { "markdownDescription": "Whether to render leading colons for type hints, and trailing colons for parameter hints.", @@ -2309,57 +2396,57 @@ } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.enable": { - "markdownDescription": "Whether to show inlay type hints for variables.", + "markdownDescription": "Show inlay type hints for variables.", "default": true, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": { - "markdownDescription": "Whether to hide inlay type hints for `let` statements that initialize to a closure.\nOnly applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.", + "markdownDescription": "Hide inlay type hints for `let` statements that initialize to a closure.\n\nOnly applies to closures with blocks, same as\n`#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.hideClosureParameter": { - "markdownDescription": "Whether to hide inlay parameter type hints for closures.", + "markdownDescription": "Hide inlay parameter type hints for closures.", "default": false, "type": "boolean" } } }, { - "title": "inlayHints", + "title": "Inlay Hints", "properties": { "rust-analyzer.inlayHints.typeHints.hideNamedConstructor": { - "markdownDescription": "Whether to hide inlay type hints for constructors.", + "markdownDescription": "Hide inlay type hints for constructors.", "default": false, "type": "boolean" } } }, { - "title": "interpret", + "title": "Interpret", "properties": { "rust-analyzer.interpret.tests": { - "markdownDescription": "Enables the experimental support for interpreting tests.", + "markdownDescription": "Enable the experimental support for interpreting tests.", "default": false, "type": "boolean" } } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.joinAssignments": { 
"markdownDescription": "Join lines merges consecutive declaration and initialization of an assignment.", @@ -2369,7 +2456,7 @@ } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.joinElseIf": { "markdownDescription": "Join lines inserts else between consecutive ifs.", @@ -2379,7 +2466,7 @@ } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.removeTrailingComma": { "markdownDescription": "Join lines removes trailing commas.", @@ -2389,7 +2476,7 @@ } }, { - "title": "joinLines", + "title": "Join Lines", "properties": { "rust-analyzer.joinLines.unwrapTrivialBlock": { "markdownDescription": "Join lines unwraps trivial blocks.", @@ -2399,37 +2486,37 @@ } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.debug.enable": { - "markdownDescription": "Whether to show `Debug` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.enable": { - "markdownDescription": "Whether to show CodeLens in Rust files.", + "markdownDescription": "Show CodeLens in Rust files.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.implementations.enable": { - "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.location": { "markdownDescription": "Where to render annotations.", @@ -2447,70 +2534,70 @@ } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.adt.enable": { - "markdownDescription": "Whether to show `References` lens for Struct, Enum, and Union.\nOnly applies when `#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `References` lens for Struct, Enum, and Union. Only applies when\n`#rust-analyzer.lens.enable#` is set.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.enumVariant.enable": { - "markdownDescription": "Whether to show `References` lens for Enum Variants.\nOnly applies when `#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `References` lens for Enum Variants. Only applies when\n`#rust-analyzer.lens.enable#` is set.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.method.enable": { - "markdownDescription": "Whether to show `Method References` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.references.trait.enable": { - "markdownDescription": "Whether to show `References` lens for Trait.\nOnly applies when `#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `References` lens for Trait. 
Only applies when `#rust-analyzer.lens.enable#` is\nset.", "default": false, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.run.enable": { - "markdownDescription": "Whether to show `Run` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.", + "markdownDescription": "Show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", "default": true, "type": "boolean" } } }, { - "title": "lens", + "title": "Lens", "properties": { "rust-analyzer.lens.updateTest.enable": { - "markdownDescription": "Whether to show `Update Test` lens. Only applies when\n`#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set.", + "markdownDescription": "Show `Update Test` lens. Only applies when `#rust-analyzer.lens.enable#` and\n`#rust-analyzer.lens.run.enable#` are set.", "default": true, "type": "boolean" } } }, { - "title": "general", + "title": "rust-analyzer", "properties": { "rust-analyzer.linkedProjects": { - "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set\nof projects.\n\nElements must be paths pointing to `Cargo.toml`,\n`rust-project.json`, `.rs` files (which will be treated as standalone files) or JSON\nobjects in `rust-project.json` format.", + "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set of projects.\n\nElements must be paths pointing to `Cargo.toml`, `rust-project.json`, `.rs` files (which\nwill be treated as standalone files) or JSON objects in `rust-project.json` format.", "default": [], "type": "array", "items": { @@ -2523,7 +2610,7 @@ } }, { - "title": "lru", + "title": "Lru", "properties": { "rust-analyzer.lru.capacity": { "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.", @@ -2538,30 +2625,30 @@ } }, { - "title": "lru", + "title": "Lru", "properties": { "rust-analyzer.lru.query.capacities": { - "markdownDescription": "Sets the LRU capacity of the specified queries.", + "markdownDescription": "The LRU capacity of the specified queries.", "default": {}, "type": "object" } } }, { - "title": "notifications", + "title": "Notifications", "properties": { "rust-analyzer.notifications.cargoTomlNotFound": { - "markdownDescription": "Whether to show `can't find Cargo.toml` error message.", + "markdownDescription": "Show `can't find Cargo.toml` error message.", "default": true, "type": "boolean" } } }, { - "title": "general", + "title": "rust-analyzer", "properties": { "rust-analyzer.numThreads": { - "markdownDescription": "How many worker threads in the main loop. The default `null` means to pick automatically.", + "markdownDescription": "The number of worker threads in the main loop. The default `null` means to pick\nautomatically.", "default": null, "anyOf": [ { @@ -2588,7 +2675,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.attributes.enable": { "markdownDescription": "Expand attribute macros. 
Requires `#rust-analyzer.procMacro.enable#` to be set.", @@ -2598,7 +2685,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.enable": { "markdownDescription": "Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.", @@ -2608,7 +2695,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.ignored": { "markdownDescription": "These proc-macros will be ignored when trying to expand them.\n\nThis config takes a map of crate names with the exported proc-macro names to ignore as values.", @@ -2618,7 +2705,7 @@ } }, { - "title": "procMacro", + "title": "Proc Macro", "properties": { "rust-analyzer.procMacro.server": { "markdownDescription": "Internal config, path to proc-macro server executable.", @@ -2631,7 +2718,7 @@ } }, { - "title": "references", + "title": "References", "properties": { "rust-analyzer.references.excludeImports": { "markdownDescription": "Exclude imports from find-all-references.", @@ -2641,7 +2728,7 @@ } }, { - "title": "references", + "title": "References", "properties": { "rust-analyzer.references.excludeTests": { "markdownDescription": "Exclude tests from find-all-references and call-hierarchy.", @@ -2651,7 +2738,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.command": { "markdownDescription": "Command to be executed instead of 'cargo' for runnables.", @@ -2664,7 +2751,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.extraArgs": { "markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.", @@ -2677,7 +2764,7 @@ } }, { - "title": "runnables", + "title": "Runnables", "properties": { "rust-analyzer.runnables.extraTestBinaryArgs": { "markdownDescription": "Additional arguments to be passed through Cargo to launched tests, benchmarks, or\ndoc-tests.\n\nUnless the launched target uses a\n[custom test harness](https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field),\nthey will end up being interpreted as options to\n[`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).", @@ -2692,7 +2779,7 @@ } }, { - "title": "rustc", + "title": "Rustc", "properties": { "rust-analyzer.rustc.source": { "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.", @@ -2705,7 +2792,7 @@ } }, { - "title": "rustfmt", + "title": "Rustfmt", "properties": { "rust-analyzer.rustfmt.extraArgs": { "markdownDescription": "Additional arguments to `rustfmt`.", @@ -2718,7 +2805,7 @@ } }, { - "title": "rustfmt", + "title": "Rustfmt", "properties": { "rust-analyzer.rustfmt.overrideCommand": { "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. 
The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.", @@ -2734,7 +2821,7 @@ } }, { - "title": "rustfmt", + "title": "Rustfmt", "properties": { "rust-analyzer.rustfmt.rangeFormatting.enable": { "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.", @@ -2744,7 +2831,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.doc.comment.inject.enable": { "markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.", @@ -2754,17 +2841,17 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.nonStandardTokens": { - "markdownDescription": "Whether the server is allowed to emit non-standard tokens and modifiers.", + "markdownDescription": "Emit non-standard tokens and modifiers\n\nWhen enabled, rust-analyzer will emit tokens and modifiers that are not part of the\nstandard set of semantic tokens.", "default": true, "type": "boolean" } } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.operator.enable": { "markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.", @@ -2774,7 +2861,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.operator.specialization.enable": { "markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.", @@ -2784,7 +2871,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.punctuation.enable": { "markdownDescription": "Use semantic tokens for punctuation.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.", @@ -2794,7 +2881,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": { "markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.", @@ -2804,7 +2891,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": { "markdownDescription": "Use specialized semantic tokens for punctuation.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.", @@ -2814,7 +2901,7 @@ } }, { - "title": "semanticHighlighting", + "title": "Semantic Highlighting", "properties": { "rust-analyzer.semanticHighlighting.strings.enable": { "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. 
vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.", @@ -2824,7 +2911,7 @@ } }, { - "title": "signatureInfo", + "title": "Signature Info", "properties": { "rust-analyzer.signatureInfo.detail": { "markdownDescription": "Show full signature of the callable. Only shows parameters if disabled.", @@ -2842,7 +2929,7 @@ } }, { - "title": "signatureInfo", + "title": "Signature Info", "properties": { "rust-analyzer.signatureInfo.documentation.enable": { "markdownDescription": "Show documentation.", @@ -2852,10 +2939,10 @@ } }, { - "title": "typing", + "title": "Typing", "properties": { "rust-analyzer.typing.triggerChars": { - "markdownDescription": "Specify the characters allowed to invoke special on typing triggers.\n- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression\n- typing `=` between two expressions adds `;` when in statement position\n- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position\n- typing `.` in a chain method call auto-indents\n- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression\n- typing `{` in a use item adds a closing `}` in the right place\n- typing `>` to complete a return type `->` will insert a whitespace after it\n- typing `<` in a path or type position inserts a closing `>` after the path or type.", + "markdownDescription": "Specify the characters allowed to invoke special on typing triggers.\n\n- typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing\n expression\n- typing `=` between two expressions adds `;` when in statement position\n- typing `=` to turn an assignment into an equality comparison removes `;` when in\n expression position\n- typing `.` in a chain method call auto-indents\n- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the\n expression\n- typing `{` in a use item adds a closing `}` in the right place\n- typing `>` to complete a return type `->` will insert a whitespace after it\n- typing `<` in a path or type position inserts a closing `>` after the path or type.", "default": "=.", "type": [ "null", @@ -2865,7 +2952,7 @@ } }, { - "title": "vfs", + "title": "Vfs", "properties": { "rust-analyzer.vfs.extraIncludes": { "markdownDescription": "Additional paths to include in the VFS. 
Generally for code that is\ngenerated or otherwise managed by a build system outside of Cargo,\nthough Cargo might be the eventual consumer.", @@ -2878,10 +2965,10 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.discoverConfig": { - "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\"\n ],\n \"progressLabel\": \"rust-analyzer\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object\ncorresponding to `DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on\n`DiscoverProjectData` to provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`,\nwhich will be substituted with the JSON-serialized form of the following\nenum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`,\nand therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to\nto update an existing workspace. 
As a reference for implementors,\nbuck2's `rust-project` will likely be useful:\nhttps://github.com/facebook/buck2/tree/main/integrations/rust-project.", + "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n \"command\": [\n \"rust-project\",\n \"develop-json\"\n ],\n \"progressLabel\": \"rust-analyzer\",\n \"filesToWatch\": [\n \"BUCK\"\n ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n Error { error: String, source: Option },\n Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n // the internally-tagged representation of the enum.\n \"kind\": \"finished\",\n // the file used by a non-Cargo build system to define\n // a package or target.\n \"buildfile\": \"rust-analyzer/BUILD\",\n // the contents of a rust-project.json, elided for brevity\n \"project\": {\n \"sysroot\": \"foo\",\n \"crates\": []\n }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n Path(AbsPathBuf),\n Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. 
As a reference for implementors, buck2's `rust-project` will likely\nbe useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project.", "default": null, "anyOf": [ { @@ -2912,7 +2999,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.excludeImports": { "markdownDescription": "Exclude all imports from workspace symbol search.\n\nIn addition to regular imports (which are always excluded),\nthis option removes public imports (better known as re-exports)\nand removes imports that rename the imported symbol.", @@ -2922,7 +3009,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.kind": { "markdownDescription": "Workspace symbol search kind.", @@ -2940,7 +3027,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.limit": { "markdownDescription": "Limits the number of items returned from a workspace symbol search (Defaults to 128).\nSome clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.\nOther clients requires all results upfront and might require a higher limit.", @@ -2951,7 +3038,7 @@ } }, { - "title": "workspace", + "title": "Workspace", "properties": { "rust-analyzer.workspace.symbol.search.scope": { "markdownDescription": "Workspace symbol search scope.", From 0e3d408c29c7e1b54ba514dfb728eb762719b15b Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Thu, 3 Jul 2025 14:24:58 +0000 Subject: [PATCH 002/118] Remove LtoModuleCodegen Most uses of it either contain a fat or thin lto module. Only WorkItem::LTO could contain both, but splitting that enum variant doesn't complicate things much. --- src/back/lto.rs | 15 +++++++-------- src/lib.rs | 6 +++--- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/src/back/lto.rs b/src/back/lto.rs index 10fce860b7770..e554dd2500bda 100644 --- a/src/back/lto.rs +++ b/src/back/lto.rs @@ -24,7 +24,7 @@ use std::sync::Arc; use gccjit::{Context, OutputKind}; use object::read::archive::ArchiveFile; -use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule, ThinShared}; +use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule, ThinShared}; use rustc_codegen_ssa::back::symbol_export; use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput}; use rustc_codegen_ssa::traits::*; @@ -176,7 +176,7 @@ pub(crate) fn run_fat( cgcx: &CodegenContext, modules: Vec>, cached_modules: Vec<(SerializedModule, WorkProduct)>, -) -> Result, FatalError> { +) -> Result, FatalError> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); let lto_data = prepare_lto(cgcx, dcx)?; @@ -201,7 +201,7 @@ fn fat_lto( mut serialized_modules: Vec<(SerializedModule, CString)>, tmp_path: TempDir, //symbols_below_threshold: &[String], -) -> Result, FatalError> { +) -> Result, FatalError> { let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module"); info!("going for a fat lto"); @@ -334,7 +334,7 @@ fn fat_lto( // of now. 
module.module_llvm.temp_dir = Some(tmp_path); - Ok(LtoModuleCodegen::Fat(module)) + Ok(module) } pub struct ModuleBuffer(PathBuf); @@ -358,7 +358,7 @@ pub(crate) fn run_thin( cgcx: &CodegenContext, modules: Vec<(String, ThinBuffer)>, cached_modules: Vec<(SerializedModule, WorkProduct)>, -) -> Result<(Vec>, Vec), FatalError> { +) -> Result<(Vec>, Vec), FatalError> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); let lto_data = prepare_lto(cgcx, dcx)?; @@ -427,7 +427,7 @@ fn thin_lto( tmp_path: TempDir, cached_modules: Vec<(SerializedModule, WorkProduct)>, //_symbols_below_threshold: &[String], -) -> Result<(Vec>, Vec), FatalError> { +) -> Result<(Vec>, Vec), FatalError> { let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis"); info!("going for that thin, thin LTO"); @@ -573,8 +573,7 @@ fn thin_lto( }*/ info!(" - {}: re-compiled", module_name); - opt_jobs - .push(LtoModuleCodegen::Thin(ThinModule { shared: shared.clone(), idx: module_index })); + opt_jobs.push(ThinModule { shared: shared.clone(), idx: module_index }); } // Save the current ThinLTO import information for the next compilation diff --git a/src/lib.rs b/src/lib.rs index a912678ef2a10..a151a0ab75534 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -97,7 +97,7 @@ use gccjit::{CType, Context, OptimizationLevel}; use gccjit::{TargetInfo, Version}; use rustc_ast::expand::allocator::AllocatorKind; use rustc_ast::expand::autodiff_attrs::AutoDiffItem; -use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule}; +use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule}; use rustc_codegen_ssa::back::write::{ CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn, }; @@ -361,7 +361,7 @@ impl WriteBackendMethods for GccCodegenBackend { cgcx: &CodegenContext, modules: Vec>, cached_modules: Vec<(SerializedModule, WorkProduct)>, - ) -> Result, FatalError> { + ) -> Result, FatalError> { back::lto::run_fat(cgcx, modules, cached_modules) } @@ -369,7 +369,7 @@ impl WriteBackendMethods for GccCodegenBackend { cgcx: &CodegenContext, modules: Vec<(String, Self::ThinBuffer)>, cached_modules: Vec<(SerializedModule, WorkProduct)>, - ) -> Result<(Vec>, Vec), FatalError> { + ) -> Result<(Vec>, Vec), FatalError> { back::lto::run_thin(cgcx, modules, cached_modules) } From d9f9bcf18f7b8c997daad39ee726c50f1316fa60 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Thu, 3 Jul 2025 14:43:09 +0000 Subject: [PATCH 003/118] Move dcx creation into WriteBackendMethods::codegen --- src/back/write.rs | 4 +++- src/lib.rs | 3 +-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/back/write.rs b/src/back/write.rs index d03d063bdace6..113abe70805b0 100644 --- a/src/back/write.rs +++ b/src/back/write.rs @@ -16,10 +16,12 @@ use crate::{GccCodegenBackend, GccContext}; pub(crate) fn codegen( cgcx: &CodegenContext, - dcx: DiagCtxtHandle<'_>, module: ModuleCodegen, config: &ModuleConfig, ) -> Result { + let dcx = cgcx.create_dcx(); + let dcx = dcx.handle(); + let _timer = cgcx.prof.generic_activity_with_arg("GCC_module_codegen", &*module.name); { let context = &module.module_llvm.context; diff --git a/src/lib.rs b/src/lib.rs index a151a0ab75534..34452bdd2005f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -408,11 +408,10 @@ impl WriteBackendMethods for GccCodegenBackend { fn codegen( cgcx: &CodegenContext, - dcx: DiagCtxtHandle<'_>, module: ModuleCodegen, config: &ModuleConfig, ) -> Result { - back::write::codegen(cgcx, dcx, module, config) + back::write::codegen(cgcx, 
module, config) } fn prepare_thin( From 9404a1192421c29828167eb6962f5099793619d1 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Thu, 3 Jul 2025 16:09:10 +0000 Subject: [PATCH 004/118] Remove unused config param from WriteBackendMethods::autodiff --- src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lib.rs b/src/lib.rs index 34452bdd2005f..8e63ebc849406 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -437,7 +437,6 @@ impl WriteBackendMethods for GccCodegenBackend { _cgcx: &CodegenContext, _module: &ModuleCodegen, _diff_functions: Vec, - _config: &ModuleConfig, ) -> Result<(), FatalError> { unimplemented!() } From bf4daef80cab63ba6d2a0889927e9fce4828c6f9 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Thu, 3 Jul 2025 16:22:32 +0000 Subject: [PATCH 005/118] Merge run_fat_lto, optimize_fat and autodiff into run_and_optimize_fat_lto --- src/lib.rs | 23 ++++++----------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 8e63ebc849406..75c36fffec983 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -357,11 +357,16 @@ impl WriteBackendMethods for GccCodegenBackend { type ThinData = ThinData; type ThinBuffer = ThinBuffer; - fn run_fat_lto( + fn run_and_optimize_fat_lto( cgcx: &CodegenContext, modules: Vec>, cached_modules: Vec<(SerializedModule, WorkProduct)>, + diff_fncs: Vec, ) -> Result, FatalError> { + if !diff_fncs.is_empty() { + unimplemented!(); + } + back::lto::run_fat(cgcx, modules, cached_modules) } @@ -391,14 +396,6 @@ impl WriteBackendMethods for GccCodegenBackend { Ok(()) } - fn optimize_fat( - _cgcx: &CodegenContext, - _module: &mut ModuleCodegen, - ) -> Result<(), FatalError> { - // TODO(antoyo) - Ok(()) - } - fn optimize_thin( cgcx: &CodegenContext, thin: ThinModule, @@ -432,14 +429,6 @@ impl WriteBackendMethods for GccCodegenBackend { ) -> Result, FatalError> { back::write::link(cgcx, dcx, modules) } - - fn autodiff( - _cgcx: &CodegenContext, - _module: &ModuleCodegen, - _diff_functions: Vec, - ) -> Result<(), FatalError> { - unimplemented!() - } } /// This is the entrypoint for a hot plugged rustc_codegen_gccjit From 7cce6aff07fa81c4a69deec899a1b87348f727de Mon Sep 17 00:00:00 2001 From: Daniel Paoliello Date: Thu, 3 Jul 2025 09:17:48 -0700 Subject: [PATCH 006/118] Make __rust_alloc_error_handler_should_panic a function --- src/allocator.rs | 44 +++++++++++++++++++++++++++++++++----------- 1 file changed, 33 insertions(+), 11 deletions(-) diff --git a/src/allocator.rs b/src/allocator.rs index cf8aa500c778f..0d8dc93274f9b 100644 --- a/src/allocator.rs +++ b/src/allocator.rs @@ -1,6 +1,6 @@ -use gccjit::{Context, FunctionType, GlobalKind, ToRValue, Type}; #[cfg(feature = "master")] -use gccjit::{FnAttribute, VarAttribute}; +use gccjit::FnAttribute; +use gccjit::{Context, FunctionType, RValue, ToRValue, Type}; use rustc_ast::expand::allocator::{ ALLOCATOR_METHODS, AllocatorKind, AllocatorTy, NO_ALLOC_SHIM_IS_UNSTABLE, alloc_error_handler_name, default_fn_name, global_fn_name, @@ -71,15 +71,13 @@ pub(crate) unsafe fn codegen( None, ); - let name = mangle_internal_symbol(tcx, OomStrategy::SYMBOL); - let global = context.new_global(None, GlobalKind::Exported, i8, name); - #[cfg(feature = "master")] - global.add_attribute(VarAttribute::Visibility(symbol_visibility_to_gcc( - tcx.sess.default_visibility(), - ))); - let value = tcx.sess.opts.unstable_opts.oom.should_panic(); - let value = context.new_rvalue_from_int(i8, value as i32); - 
global.global_set_initializer_rvalue(value); + create_const_value_function( + tcx, + context, + &mangle_internal_symbol(tcx, OomStrategy::SYMBOL), + i8, + context.new_rvalue_from_int(i8, tcx.sess.opts.unstable_opts.oom.should_panic() as i32), + ); create_wrapper_function( tcx, @@ -91,6 +89,30 @@ pub(crate) unsafe fn codegen( ); } +fn create_const_value_function( + tcx: TyCtxt<'_>, + context: &Context<'_>, + name: &str, + output: Type<'_>, + value: RValue<'_>, +) { + let func = context.new_function(None, FunctionType::Exported, output, &[], name, false); + + #[cfg(feature = "master")] + func.add_attribute(FnAttribute::Visibility(symbol_visibility_to_gcc( + tcx.sess.default_visibility(), + ))); + + func.add_attribute(FnAttribute::AlwaysInline); + + if tcx.sess.must_emit_unwind_tables() { + // TODO(antoyo): emit unwind tables. + } + + let block = func.new_block("entry"); + block.end_with_return(None, value); +} + fn create_wrapper_function( tcx: TyCtxt<'_>, context: &Context<'_>, From 7b1674d5d0f395bd49434f0cec2f74c26b378362 Mon Sep 17 00:00:00 2001 From: Diggory Blake Date: Sat, 5 Jul 2025 15:58:04 +0100 Subject: [PATCH 007/118] Use `object` crate from crates.io to fix windows build error --- Cargo.lock | 10 ++++++++++ Cargo.toml | 1 + src/lib.rs | 1 - 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index b20c181a8cbf9..7f35c1a80bda7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -143,6 +143,15 @@ dependencies = [ "libc", ] +[[package]] +name = "object" +version = "0.37.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03fd943161069e1768b4b3d050890ba48730e590f57e56d4aa04e7e090e61b4a" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.20.2" @@ -179,6 +188,7 @@ dependencies = [ "boml", "gccjit", "lang_tester", + "object", "tempfile", ] diff --git a/Cargo.toml b/Cargo.toml index c284e3f060b8f..05b0431b6ba9b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,6 +22,7 @@ master = ["gccjit/master"] default = ["master"] [dependencies] +object = { version = "0.37.0", default-features = false, features = ["std", "read"] } gccjit = "2.7" #gccjit = { git = "https://github.com/rust-lang/gccjit.rs" } diff --git a/src/lib.rs b/src/lib.rs index a912678ef2a10..56afdd55bf990 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -27,7 +27,6 @@ #![allow(clippy::needless_lifetimes, clippy::uninlined_format_args)] // Some "regular" crates we want to share with rustc -extern crate object; extern crate smallvec; // FIXME(antoyo): clippy bug: remove the #[allow] when it's fixed. #[allow(unused_extern_crates)] From 214311beb05883c5d07200d28cc926e2acfbaaad Mon Sep 17 00:00:00 2001 From: Diggory Blake Date: Sat, 5 Jul 2025 17:23:39 +0100 Subject: [PATCH 008/118] Make tempfile a normal dependency --- Cargo.toml | 2 +- src/lib.rs | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 05b0431b6ba9b..193348d1ef608 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,6 +23,7 @@ default = ["master"] [dependencies] object = { version = "0.37.0", default-features = false, features = ["std", "read"] } +tempfile = "3.20" gccjit = "2.7" #gccjit = { git = "https://github.com/rust-lang/gccjit.rs" } @@ -32,7 +33,6 @@ gccjit = "2.7" [dev-dependencies] boml = "0.3.1" lang_tester = "0.8.0" -tempfile = "3.20" [profile.dev] # By compiling dependencies with optimizations, performing tests gets much faster. 
diff --git a/src/lib.rs b/src/lib.rs index 56afdd55bf990..1a6eec0ed0bf9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -26,11 +26,9 @@ #![deny(clippy::pattern_type_mismatch)] #![allow(clippy::needless_lifetimes, clippy::uninlined_format_args)] -// Some "regular" crates we want to share with rustc +// These crates are pulled from the sysroot because they are part of +// rustc's public API, so we need to ensure version compatibility. extern crate smallvec; -// FIXME(antoyo): clippy bug: remove the #[allow] when it's fixed. -#[allow(unused_extern_crates)] -extern crate tempfile; #[macro_use] extern crate tracing; From 303a795ac5d9e60c4524fadebd36d0a8f95d8e9c Mon Sep 17 00:00:00 2001 From: Edoardo Marangoni Date: Sun, 29 Jun 2025 12:11:51 +0200 Subject: [PATCH 009/118] compiler: Parse `p-` specs in datalayout string, allow definition of custom default data address space --- src/common.rs | 2 +- src/consts.rs | 4 ++-- src/intrinsic/mod.rs | 2 +- src/intrinsic/simd.rs | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/common.rs b/src/common.rs index dd582834facad..32713eb56c6ed 100644 --- a/src/common.rs +++ b/src/common.rs @@ -162,7 +162,7 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { } fn const_usize(&self, i: u64) -> RValue<'gcc> { - let bit_size = self.data_layout().pointer_size.bits(); + let bit_size = self.data_layout().pointer_size().bits(); if bit_size < 64 { // make sure it doesn't overflow assert!(i < (1 << bit_size)); diff --git a/src/consts.rs b/src/consts.rs index b43f9b24c6a31..c04c75e1b11fd 100644 --- a/src/consts.rs +++ b/src/consts.rs @@ -294,7 +294,7 @@ pub(crate) fn const_alloc_to_gcc_uncached<'gcc>( let alloc = alloc.inner(); let mut llvals = Vec::with_capacity(alloc.provenance().ptrs().len() + 1); let dl = cx.data_layout(); - let pointer_size = dl.pointer_size.bytes() as usize; + let pointer_size = dl.pointer_size().bytes() as usize; let mut next_offset = 0; for &(offset, prov) in alloc.provenance().ptrs().iter() { @@ -331,7 +331,7 @@ pub(crate) fn const_alloc_to_gcc_uncached<'gcc>( ), abi::Scalar::Initialized { value: Primitive::Pointer(address_space), - valid_range: WrappingRange::full(dl.pointer_size), + valid_range: WrappingRange::full(dl.pointer_size()), }, cx.type_i8p_ext(address_space), )); diff --git a/src/intrinsic/mod.rs b/src/intrinsic/mod.rs index 497605978fe27..0753ac1aeb84e 100644 --- a/src/intrinsic/mod.rs +++ b/src/intrinsic/mod.rs @@ -541,7 +541,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc // For rusty ABIs, small aggregates are actually passed // as `RegKind::Integer` (see `FnAbi::adjust_for_abi`), // so we re-use that same threshold here. 
- layout.size() <= self.data_layout().pointer_size * 2 + layout.size() <= self.data_layout().pointer_size() * 2 } }; diff --git a/src/intrinsic/simd.rs b/src/intrinsic/simd.rs index 2e508813fc3bc..350915a277e33 100644 --- a/src/intrinsic/simd.rs +++ b/src/intrinsic/simd.rs @@ -1184,7 +1184,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( let lhs = args[0].immediate(); let rhs = args[1].immediate(); let is_add = name == sym::simd_saturating_add; - let ptr_bits = bx.tcx().data_layout.pointer_size.bits() as _; + let ptr_bits = bx.tcx().data_layout.pointer_size().bits() as _; let (signed, elem_width, elem_ty) = match *in_elem.kind() { ty::Int(i) => (true, i.bit_width().unwrap_or(ptr_bits) / 8, bx.cx.type_int_from_ty(i)), ty::Uint(i) => { From dffe77dbabf273e811929580825c6d474a5a605a Mon Sep 17 00:00:00 2001 From: Yotam Ofek Date: Sat, 28 Jun 2025 23:40:02 +0000 Subject: [PATCH 010/118] Remove unused allow attrs --- src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lib.rs b/src/lib.rs index 1a6eec0ed0bf9..d8fae1ca47d30 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -19,7 +19,6 @@ #![doc(rust_logo)] #![feature(rustdoc_internals)] #![feature(rustc_private)] -#![allow(broken_intra_doc_links)] #![recursion_limit = "256"] #![warn(rust_2018_idioms)] #![warn(unused_lifetimes)] From 46836c352d86268ad88fb406337413539b48ca32 Mon Sep 17 00:00:00 2001 From: mejrs <59372212+mejrs@users.noreply.github.com> Date: Tue, 24 Jun 2025 16:58:46 +0200 Subject: [PATCH 011/118] Remove support for dynamic allocas --- src/builder.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index b1785af444a17..28d1ec7d89564 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -926,10 +926,6 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { .get_address(self.location) } - fn dynamic_alloca(&mut self, _len: RValue<'gcc>, _align: Align) -> RValue<'gcc> { - unimplemented!(); - } - fn load(&mut self, pointee_ty: Type<'gcc>, ptr: RValue<'gcc>, align: Align) -> RValue<'gcc> { let block = self.llbb(); let function = block.get_function(); From 3d5b1774db136b03a80279f2829e58863056ff3a Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Wed, 12 Mar 2025 10:26:37 +0000 Subject: [PATCH 012/118] Add opaque TypeId handles for CTFE --- src/common.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/common.rs b/src/common.rs index 32713eb56c6ed..28848ca61845c 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,7 +1,6 @@ use gccjit::{LValue, RValue, ToRValue, Type}; -use rustc_abi as abi; -use rustc_abi::HasDataLayout; use rustc_abi::Primitive::Pointer; +use rustc_abi::{self as abi, HasDataLayout}; use rustc_codegen_ssa::traits::{ BaseTypeCodegenMethods, ConstCodegenMethods, MiscCodegenMethods, StaticCodegenMethods, }; @@ -282,6 +281,13 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { let init = self.const_data_from_alloc(alloc); self.static_addr_of(init, alloc.inner().align, None) } + GlobalAlloc::TypeId { .. } => { + let val = self.const_usize(offset.bytes()); + // This is still a variable of pointer type, even though we only use the provenance + // of that pointer in CTFE and Miri. But to make LLVM's type system happy, + // we need an int-to-ptr cast here (it doesn't matter at all which provenance that picks). 
+ return self.context.new_cast(None, val, ty); + } GlobalAlloc::Static(def_id) => { assert!(self.tcx.is_static(def_id)); self.get_static(def_id).get_address(None) From e55baf9f7ada72ed9f661194065154a09aab3a9e Mon Sep 17 00:00:00 2001 From: Folkert de Vries Date: Wed, 2 Jul 2025 11:12:54 +0200 Subject: [PATCH 013/118] use `codegen_instance_attrs` where an instance is (easily) available --- src/attributes.rs | 2 +- src/callee.rs | 2 +- src/mono_item.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/attributes.rs b/src/attributes.rs index bf0927dc590ba..7a1ae6ca9c8b7 100644 --- a/src/attributes.rs +++ b/src/attributes.rs @@ -87,7 +87,7 @@ pub fn from_fn_attrs<'gcc, 'tcx>( #[cfg_attr(not(feature = "master"), allow(unused_variables))] func: Function<'gcc>, instance: ty::Instance<'tcx>, ) { - let codegen_fn_attrs = cx.tcx.codegen_fn_attrs(instance.def_id()); + let codegen_fn_attrs = cx.tcx.codegen_instance_attrs(instance.def); #[cfg(feature = "master")] { diff --git a/src/callee.rs b/src/callee.rs index 189ac7cd77928..e7ca95af594c6 100644 --- a/src/callee.rs +++ b/src/callee.rs @@ -105,7 +105,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>) let is_hidden = if is_generic { // This is a monomorphization of a generic function. if !(cx.tcx.sess.opts.share_generics() - || tcx.codegen_fn_attrs(instance_def_id).inline + || tcx.codegen_instance_attrs(instance.def).inline == rustc_attr_data_structures::InlineAttr::Never) { // When not sharing generics, all instances are in the same diff --git a/src/mono_item.rs b/src/mono_item.rs index 539e3ac850763..51f35cbdee472 100644 --- a/src/mono_item.rs +++ b/src/mono_item.rs @@ -53,7 +53,7 @@ impl<'gcc, 'tcx> PreDefineCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty()); self.linkage.set(base::linkage_to_gcc(linkage)); let decl = self.declare_fn(symbol_name, fn_abi); - //let attrs = self.tcx.codegen_fn_attrs(instance.def_id()); + //let attrs = self.tcx.codegen_instance_attrs(instance.def); attributes::from_fn_attrs(self, decl, instance); From d088fb739b40c083d3dd74185c4e1b05c86839ce Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 18 Jul 2025 18:31:20 +0200 Subject: [PATCH 014/118] Merge commit 'f682d09eefc6700b9e5851ef193847959acf4fac' into subtree-update_cg_gcc_2025-07-18 --- .github/workflows/m68k.yml | 40 +- .github/workflows/release.yml | 3 +- build_system/build_sysroot/Cargo.lock | 502 -------------------------- build_system/build_sysroot/Cargo.toml | 39 -- build_system/build_sysroot/lib.rs | 1 - build_system/src/build.rs | 38 +- build_system/src/config.rs | 5 - build_system/src/utils.rs | 13 - doc/tips.md | 6 +- example/mini_core_hello_world.rs | 16 +- rust-toolchain | 2 +- src/builder.rs | 6 +- src/lib.rs | 7 +- src/mono_item.rs | 2 +- tests/failing-ui-tests.txt | 2 + tests/run/asm.rs | 1 + tests/run/float.rs | 16 +- tests/run/int.rs | 2 - tests/run/volatile.rs | 3 +- tests/run/volatile2.rs | 6 +- 20 files changed, 85 insertions(+), 625 deletions(-) delete mode 100644 build_system/build_sysroot/Cargo.lock delete mode 100644 build_system/build_sysroot/Cargo.toml delete mode 100644 build_system/build_sysroot/lib.rs diff --git a/.github/workflows/m68k.yml b/.github/workflows/m68k.yml index 245bee7f2a3bd..759d0d59e2685 100644 --- a/.github/workflows/m68k.yml +++ b/.github/workflows/m68k.yml @@ -14,8 +14,6 @@ permissions: env: # Enable backtraces for easier debugging RUST_BACKTRACE: 1 - # TODO: remove when confish.sh is removed. 
- OVERWRITE_TARGET_TRIPLE: m68k-unknown-linux-gnu jobs: build: @@ -59,14 +57,12 @@ jobs: - name: Setup path to libgccjit run: | - sudo dpkg -i gcc-m68k-15.deb + sudo dpkg --force-overwrite -i gcc-m68k-15.deb echo 'gcc-path = "/usr/lib/"' > config.toml - name: Set env run: | echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV - - #- name: Cache rust repository ## We only clone the rust repository for rustc tests @@ -86,16 +82,20 @@ jobs: - name: Build sample project with target defined as JSON spec run: | ./y.sh prepare --only-libcore --cross - ./y.sh build --sysroot --features compiler_builtins/no-f16-f128 --target-triple m68k-unknown-linux-gnu --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json - ./y.sh cargo build --manifest-path=./tests/hello-world/Cargo.toml --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json + ./y.sh build --sysroot --features compiler-builtins-no-f16-f128 --target-triple m68k-unknown-linux-gnu --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json + CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ./y.sh cargo build --manifest-path=./tests/hello-world/Cargo.toml --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json ./y.sh clean all - name: Build run: | ./y.sh prepare --only-libcore --cross - ./y.sh build --sysroot --features compiler_builtins/no-f16-f128 --target-triple m68k-unknown-linux-gnu - ./y.sh test --mini-tests - CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu ./y.sh test --cargo-tests + ./y.sh build --sysroot --features compiler-builtins-no-f16-f128 --target-triple m68k-unknown-linux-gnu + ./y.sh test --mini-tests --target-triple m68k-unknown-linux-gnu + # FIXME: since https://github.com/rust-lang/rust/pull/140809, we cannot run programs for architectures not + # supported by the object crate, since this adds a dependency on symbols.o for the panic runtime. + # And as such, a wrong order of the object files in the linker command now fails with an undefined reference + # to some symbols like __rustc::rust_panic. + #CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu ./y.sh test --cargo-tests --target-triple m68k-unknown-linux-gnu ./y.sh clean all - name: Prepare dependencies @@ -104,9 +104,23 @@ jobs: git config --global user.name "User" ./y.sh prepare --cross - - name: Run tests - run: | - ./y.sh test --release --clean --build-sysroot --sysroot-features compiler_builtins/no-f16-f128 ${{ matrix.commands }} + # FIXME: We cannot run programs for architectures not supported by the object crate. See comment above. + #- name: Run tests + #run: | + #./y.sh test --target-triple m68k-unknown-linux-gnu --release --clean --build-sysroot --sysroot-features compiler-builtins-no-f16-f128 ${{ matrix.commands }} + + # FIXME: We cannot run programs for architectures not supported by the object crate. See comment above. + #- name: Run Hello World! + #run: | + #./y.sh build --target-triple m68k-unknown-linux-gnu + + #vm_dir=$(pwd)/vm + #cd tests/hello-world + #CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ../../y.sh cargo build --target m68k-unknown-linux-gnu + #sudo cp target/m68k-unknown-linux-gnu/debug/hello_world $vm_dir/home/ + #sudo chroot $vm_dir qemu-m68k-static /home/hello_world > hello_world_stdout + #expected_output="40" + #test $(cat hello_world_stdout) == $expected_output || (echo "Output differs. Actual output: $(cat hello_world_stdout)"; exit 1) # Summary job for the merge queue. # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! 
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1d8eaf9a141f6..b7e2583aad39f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -78,7 +78,8 @@ jobs: - name: Run tests run: | # FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros. - echo -n 'lto = "fat"' >> build_system/build_sysroot/Cargo.toml + # FIXME(antoyo): this should probably not be needed since we embed the LTO bitcode. + printf '[profile.release]\nlto = "fat"\n' >> build/build_sysroot/sysroot_src/library/Cargo.toml EMBED_LTO_BITCODE=1 ./y.sh test --release --clean --release-sysroot --build-sysroot --keep-lto-tests ${{ matrix.commands }} - name: Run y.sh cargo build diff --git a/build_system/build_sysroot/Cargo.lock b/build_system/build_sysroot/Cargo.lock deleted file mode 100644 index 0c75977ee7989..0000000000000 --- a/build_system/build_sysroot/Cargo.lock +++ /dev/null @@ -1,502 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "compiler_builtins", - "gimli", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "adler2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-core", -] - -[[package]] -name = "alloc" -version = "0.0.0" -dependencies = [ - "compiler_builtins", - "core", -] - -[[package]] -name = "alloctests" -version = "0.0.0" -dependencies = [ - "rand", - "rand_xorshift", -] - -[[package]] -name = "cc" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aeb932158bd710538c73702db6945cb68a8fb08c519e6e12706b94263b36db8" -dependencies = [ - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-core", -] - -[[package]] -name = "compiler_builtins" -version = "0.1.160" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6376049cfa92c0aa8b9ac95fae22184b981c658208d4ed8a1dc553cd83612895" -dependencies = [ - "cc", - "rustc-std-workspace-core", -] - -[[package]] -name = "core" -version = "0.0.0" - -[[package]] -name = "coretests" -version = "0.0.0" -dependencies = [ - "rand", - "rand_xorshift", -] - -[[package]] -name = "dlmalloc" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cff88b751e7a276c4ab0e222c3f355190adc6dde9ce39c851db39da34990df7" -dependencies = [ - "cfg-if", - "compiler_builtins", - "libc", - "rustc-std-workspace-core", - "windows-sys", -] - -[[package]] -name = "fortanix-sgx-abi" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57cafc2274c10fab234f176b25903ce17e690fca7597090d50880e047a0389c5" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-core", -] - -[[package]] -name = "getopts" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" -dependencies = [ - "rustc-std-workspace-core", - "rustc-std-workspace-std", - "unicode-width", -] - -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "hashbrown" -version = "0.15.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "hermit-abi" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f154ce46856750ed433c8649605bf7ed2de3bc35fd9d2a9f30cddd873c80cb08" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "libc" -version = "0.2.172" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" -dependencies = [ - "rustc-std-workspace-core", -] - -[[package]] -name = "memchr" -version = "2.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-core", -] - -[[package]] -name = "miniz_oxide" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" -dependencies = [ - "adler2", - "compiler_builtins", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "object" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "compiler_builtins", - "memchr", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "panic_abort" -version = "0.0.0" -dependencies = [ - "alloc", - "compiler_builtins", - "core", - "libc", -] - -[[package]] -name = "panic_unwind" -version = "0.0.0" -dependencies = [ - "alloc", - "cfg-if", - "compiler_builtins", - "core", - "libc", - "unwind", -] - -[[package]] -name = "proc_macro" -version = "0.0.0" -dependencies = [ - "core", - "rustc-literal-escaper", - "std", -] - -[[package]] -name = "profiler_builtins" -version = "0.0.0" -dependencies = [ - "cc", -] - -[[package]] -name = "r-efi" -version = "5.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-core", -] - -[[package]] -name = "r-efi-alloc" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e43c53ff1a01d423d1cb762fd991de07d32965ff0ca2e4f80444ac7804198203" -dependencies = [ - "compiler_builtins", - "r-efi", - "rustc-std-workspace-core", -] - -[[package]] -name = "rand" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" -dependencies = [ - "rand_core", -] - -[[package]] -name = "rand_core" -version 
= "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" - -[[package]] -name = "rand_xorshift" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" -dependencies = [ - "rand_core", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-core", -] - -[[package]] -name = "rustc-literal-escaper" -version = "0.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04" -dependencies = [ - "rustc-std-workspace-std", -] - -[[package]] -name = "rustc-std-workspace-alloc" -version = "1.99.0" -dependencies = [ - "alloc", -] - -[[package]] -name = "rustc-std-workspace-core" -version = "1.99.0" -dependencies = [ - "core", -] - -[[package]] -name = "rustc-std-workspace-std" -version = "1.99.0" -dependencies = [ - "std", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "std" -version = "0.0.0" -dependencies = [ - "addr2line", - "alloc", - "cfg-if", - "compiler_builtins", - "core", - "dlmalloc", - "fortanix-sgx-abi", - "hashbrown", - "hermit-abi", - "libc", - "miniz_oxide", - "object", - "panic_abort", - "panic_unwind", - "r-efi", - "r-efi-alloc", - "rand", - "rand_xorshift", - "rustc-demangle", - "std_detect", - "unwind", - "wasi", - "windows-targets 0.0.0", -] - -[[package]] -name = "std_detect" -version = "0.1.5" -dependencies = [ - "cfg-if", - "compiler_builtins", - "libc", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "sysroot" -version = "0.0.0" -dependencies = [ - "proc_macro", - "profiler_builtins", - "std", - "test", -] - -[[package]] -name = "test" -version = "0.0.0" -dependencies = [ - "core", - "getopts", - "libc", - "std", -] - -[[package]] -name = "unicode-width" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-core", - "rustc-std-workspace-std", -] - -[[package]] -name = "unwind" -version = "0.0.0" -dependencies = [ - "cfg-if", - "compiler_builtins", - "core", - "libc", - "unwinding", -] - -[[package]] -name = "unwinding" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8393f2782b6060a807337ff353780c1ca15206f9ba2424df18cb6e733bd7b345" -dependencies = [ - "compiler_builtins", - "gimli", - "rustc-std-workspace-core", -] - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" -dependencies = [ - "compiler_builtins", - "rustc-std-workspace-alloc", - "rustc-std-workspace-core", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = 
[ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.0.0" - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" diff --git a/build_system/build_sysroot/Cargo.toml b/build_system/build_sysroot/Cargo.toml deleted file mode 100644 index 29a3bcec304c8..0000000000000 --- a/build_system/build_sysroot/Cargo.toml +++ /dev/null @@ -1,39 +0,0 @@ -[package] -authors = ["rustc_codegen_gcc devs"] -name = "sysroot" -version = "0.0.0" -resolver = "2" - -[dependencies] -core = { path = "./sysroot_src/library/core" } -compiler_builtins = { path = "./sysroot_src/library/compiler-builtins/compiler-builtins" } -alloc = { path = "./sysroot_src/library/alloc" } -std = { path = "./sysroot_src/library/std", features = ["panic_unwind", "backtrace"] } -test = { path = "./sysroot_src/library/test" } -proc_macro = { path = "./sysroot_src/library/proc_macro" } - -[patch.crates-io] -rustc-std-workspace-core = { path = "./sysroot_src/library/rustc-std-workspace-core" } -rustc-std-workspace-alloc = { path = "./sysroot_src/library/rustc-std-workspace-alloc" } -rustc-std-workspace-std = { path = "./sysroot_src/library/rustc-std-workspace-std" } -compiler_builtins = { path = "./sysroot_src/library/compiler-builtins/compiler-builtins" } - -# For compiler-builtins we always use a high number of codegen units. -# The goal here is to place every single intrinsic into its own object -# file to avoid symbol clashes with the system libgcc if possible. 
Note -# that this number doesn't actually produce this many object files, we -# just don't create more than this number of object files. -# -# It's a bit of a bummer that we have to pass this here, unfortunately. -# Ideally this would be specified through an env var to Cargo so Cargo -# knows how many CGUs are for this specific crate, but for now -# per-crate configuration isn't specifiable in the environment. -[profile.dev.package.compiler_builtins] -codegen-units = 10000 - -[profile.release.package.compiler_builtins] -codegen-units = 10000 - -[profile.release] -debug = "limited" -#lto = "fat" # TODO(antoyo): re-enable when the failing LTO tests regarding proc-macros are fixed. diff --git a/build_system/build_sysroot/lib.rs b/build_system/build_sysroot/lib.rs deleted file mode 100644 index 0c9ac1ac8e4bd..0000000000000 --- a/build_system/build_sysroot/lib.rs +++ /dev/null @@ -1 +0,0 @@ -#![no_std] diff --git a/build_system/src/build.rs b/build_system/src/build.rs index ecc4c1b2fe224..94b40319f4a77 100644 --- a/build_system/src/build.rs +++ b/build_system/src/build.rs @@ -5,7 +5,7 @@ use std::path::Path; use crate::config::{Channel, ConfigInfo}; use crate::utils::{ - copy_file, create_dir, get_sysroot_dir, run_command, run_command_with_output_and_env, walk_dir, + create_dir, get_sysroot_dir, run_command, run_command_with_output_and_env, walk_dir, }; #[derive(Default)] @@ -53,11 +53,11 @@ impl BuildArg { } } -fn cleanup_sysroot_previous_build(start_dir: &Path) { +fn cleanup_sysroot_previous_build(library_dir: &Path) { // Cleanup for previous run // Clean target dir except for build scripts and incremental cache let _ = walk_dir( - start_dir.join("target"), + library_dir.join("target"), &mut |dir: &Path| { for top in &["debug", "release"] { let _ = fs::remove_dir_all(dir.join(top).join("build")); @@ -95,31 +95,13 @@ fn cleanup_sysroot_previous_build(start_dir: &Path) { &mut |_| Ok(()), false, ); - - let _ = fs::remove_file(start_dir.join("Cargo.lock")); - let _ = fs::remove_file(start_dir.join("test_target/Cargo.lock")); - let _ = fs::remove_dir_all(start_dir.join("sysroot")); -} - -pub fn create_build_sysroot_content(start_dir: &Path) -> Result<(), String> { - if !start_dir.is_dir() { - create_dir(start_dir)?; - } - copy_file("build_system/build_sysroot/Cargo.toml", start_dir.join("Cargo.toml"))?; - copy_file("build_system/build_sysroot/Cargo.lock", start_dir.join("Cargo.lock"))?; - - let src_dir = start_dir.join("src"); - if !src_dir.is_dir() { - create_dir(&src_dir)?; - } - copy_file("build_system/build_sysroot/lib.rs", start_dir.join("src/lib.rs")) } pub fn build_sysroot(env: &HashMap, config: &ConfigInfo) -> Result<(), String> { let start_dir = get_sysroot_dir(); - cleanup_sysroot_previous_build(&start_dir); - create_build_sysroot_content(&start_dir)?; + let library_dir = start_dir.join("sysroot_src").join("library"); + cleanup_sysroot_previous_build(&library_dir); // Builds libs let mut rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default(); @@ -157,9 +139,13 @@ pub fn build_sysroot(env: &HashMap, config: &ConfigInfo) -> Resu rustflags.push_str(&cg_rustflags); } + args.push(&"--features"); + args.push(&"backtrace"); + let mut env = env.clone(); env.insert("RUSTFLAGS".to_string(), rustflags); - run_command_with_output_and_env(&args, Some(&start_dir), Some(&env))?; + let sysroot_dir = library_dir.join("sysroot"); + run_command_with_output_and_env(&args, Some(&sysroot_dir), Some(&env))?; // Copy files to sysroot let sysroot_path = 
start_dir.join(format!("sysroot/lib/rustlib/{}/lib/", config.target_triple)); @@ -169,7 +155,7 @@ pub fn build_sysroot(env: &HashMap, config: &ConfigInfo) -> Resu run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ()) }; walk_dir( - start_dir.join(format!("target/{}/{}/deps", config.target_triple, channel)), + library_dir.join(format!("target/{}/{}/deps", config.target_triple, channel)), &mut copier.clone(), &mut copier, false, @@ -178,7 +164,7 @@ pub fn build_sysroot(env: &HashMap, config: &ConfigInfo) -> Resu // Copy the source files to the sysroot (Rust for Linux needs this). let sysroot_src_path = start_dir.join("sysroot/lib/rustlib/src/rust"); create_dir(&sysroot_src_path)?; - run_command(&[&"cp", &"-r", &start_dir.join("sysroot_src/library/"), &sysroot_src_path], None)?; + run_command(&[&"cp", &"-r", &library_dir, &sysroot_src_path], None)?; Ok(()) } diff --git a/build_system/src/config.rs b/build_system/src/config.rs index 650c030ca5393..a5f802e293a94 100644 --- a/build_system/src/config.rs +++ b/build_system/src/config.rs @@ -352,11 +352,6 @@ impl ConfigInfo { None => return Err("no host found".to_string()), }; - if self.target_triple.is_empty() - && let Some(overwrite) = env.get("OVERWRITE_TARGET_TRIPLE") - { - self.target_triple = overwrite.clone(); - } if self.target_triple.is_empty() { self.target_triple = self.host_triple.clone(); } diff --git a/build_system/src/utils.rs b/build_system/src/utils.rs index d77707d5f17a6..fc948c54b24aa 100644 --- a/build_system/src/utils.rs +++ b/build_system/src/utils.rs @@ -303,19 +303,6 @@ pub fn create_dir>(path: P) -> Result<(), String> { }) } -pub fn copy_file, T: AsRef>(from: F, to: T) -> Result<(), String> { - fs::copy(&from, &to) - .map_err(|error| { - format!( - "Failed to copy file `{}` into `{}`: {:?}", - from.as_ref().display(), - to.as_ref().display(), - error - ) - }) - .map(|_| ()) -} - /// This function differs from `git_clone` in how it handles *where* the repository will be cloned. /// In `git_clone`, it is cloned in the provided path. In this function, the path you provide is /// the parent folder. So if you pass "a" as folder and try to clone "b.git", it will be cloned into diff --git a/doc/tips.md b/doc/tips.md index 86c22db186e01..e62c3402a292d 100644 --- a/doc/tips.md +++ b/doc/tips.md @@ -62,14 +62,14 @@ generate it in [gimple.md](./doc/gimple.md). * Run `./y.sh prepare --cross` so that the sysroot is patched for the cross-compiling case. * Set the path to the cross-compiling libgccjit in `gcc-path` (in `config.toml`). - * Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as being `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu`. - * Build your project by specifying the target: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target m68k-unknown-linux-gnu`. + * Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. You can specify which linker to use via `CG_RUSTFLAGS="-Clinker="`, for instance: `CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc"`. Specify the target when building the sysroot: `./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu`. + * Build your project by specifying the target and the linker to use: `CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ../y.sh cargo build --target m68k-unknown-linux-gnu`. 
If the target is not yet supported by the Rust compiler, create a [target specification file](https://docs.rust-embedded.org/embedonomicon/custom-target.html) (note that the `arch` specified in this file must be supported by the rust compiler). Then, you can use it the following way: * Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json` - * Build your project by specifying the target specification file: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target path/to/m68k-unknown-linux-gnu.json`. + * Build your project by specifying the target specification file: `../y.sh cargo build --target path/to/m68k-unknown-linux-gnu.json`. If you get the following error: diff --git a/example/mini_core_hello_world.rs b/example/mini_core_hello_world.rs index 6b6f71edaf8c9..358f265a6b856 100644 --- a/example/mini_core_hello_world.rs +++ b/example/mini_core_hello_world.rs @@ -6,7 +6,7 @@ )] #![no_core] #![allow(dead_code, internal_features, non_camel_case_types)] -#![rustfmt::skip] +#![rustfmt_skip] extern crate mini_core; @@ -198,10 +198,24 @@ fn main() { assert_eq!(intrinsics::align_of::() as u8, 2); assert_eq!(intrinsics::align_of_val(&a) as u8, intrinsics::align_of::<&str>() as u8); +<<<<<<< HEAD assert!(!const { intrinsics::needs_drop::() }); assert!(!const { intrinsics::needs_drop::<[u8]>() }); assert!(const { intrinsics::needs_drop::() }); assert!(const { intrinsics::needs_drop::() }); +======= + /* + * TODO: re-enable in the next sync. + let u8_needs_drop = const { intrinsics::needs_drop::() }; + assert!(!u8_needs_drop); + let slice_needs_drop = const { intrinsics::needs_drop::<[u8]>() }; + assert!(!slice_needs_drop); + let noisy_drop = const { intrinsics::needs_drop::() }; + assert!(noisy_drop); + let noisy_unsized_drop = const { intrinsics::needs_drop::() }; + assert!(noisy_unsized_drop); + */ +>>>>>>> f682d09eefc6700b9e5851ef193847959acf4fac Unique { pointer: 0 as *const &str, diff --git a/rust-toolchain b/rust-toolchain index bccbc6cd2c5cd..2fe8ec4647fad 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2025-06-28" +channel = "nightly-2025-07-04" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] diff --git a/src/builder.rs b/src/builder.rs index 28d1ec7d89564..a4ec4bf8deac4 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -971,7 +971,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { fn volatile_load(&mut self, ty: Type<'gcc>, ptr: RValue<'gcc>) -> RValue<'gcc> { let ptr = self.context.new_cast(self.location, ptr, ty.make_volatile().make_pointer()); - ptr.dereference(self.location).to_rvalue() + // (FractalFir): We insert a local here, to ensure this volatile load can't move across + // blocks. 
+ let local = self.current_func().new_local(self.location, ty, "volatile_tmp"); + self.block.add_assignment(self.location, local, ptr.dereference(self.location).to_rvalue()); + local.to_rvalue() } fn atomic_load( diff --git a/src/lib.rs b/src/lib.rs index d81bcc5977563..af416929ea73d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -273,6 +273,10 @@ fn new_context<'gcc, 'tcx>(tcx: TyCtxt<'tcx>) -> Context<'gcc> { } impl ExtraBackendMethods for GccCodegenBackend { + fn supports_parallel(&self) -> bool { + false + } + fn codegen_allocator( &self, tcx: TyCtxt<'_>, @@ -341,8 +345,7 @@ impl Deref for SyncContext { } unsafe impl Send for SyncContext {} -// FIXME(antoyo): that shouldn't be Sync. Parallel compilation is currently disabled with "-Zno-parallel-llvm". -// TODO: disable it here by returning false in CodegenBackend::supports_parallel(). +// FIXME(antoyo): that shouldn't be Sync. Parallel compilation is currently disabled with "CodegenBackend::supports_parallel()". unsafe impl Sync for SyncContext {} impl WriteBackendMethods for GccCodegenBackend { diff --git a/src/mono_item.rs b/src/mono_item.rs index 51f35cbdee472..ff188c437daea 100644 --- a/src/mono_item.rs +++ b/src/mono_item.rs @@ -64,7 +64,7 @@ impl<'gcc, 'tcx> PreDefineCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { if linkage != Linkage::Internal && self.tcx.is_compiler_builtins(LOCAL_CRATE) { #[cfg(feature = "master")] decl.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)); - } else { + } else if visibility != Visibility::Default { #[cfg(feature = "master")] decl.add_attribute(FnAttribute::Visibility(base::visibility_to_gcc(visibility))); } diff --git a/tests/failing-ui-tests.txt b/tests/failing-ui-tests.txt index 544d0bfc71050..6979c04d53430 100644 --- a/tests/failing-ui-tests.txt +++ b/tests/failing-ui-tests.txt @@ -80,3 +80,5 @@ tests/ui/uninhabited/uninhabited-transparent-return-abi.rs tests/ui/coroutine/panic-drops-resume.rs tests/ui/coroutine/panic-drops.rs tests/ui/coroutine/panic-safe.rs +tests/ui/process/nofile-limit.rs +tests/ui/simd/intrinsic/generic-arithmetic-pass.rs diff --git a/tests/run/asm.rs b/tests/run/asm.rs index 2dbf43be664dc..9b15a28d82988 100644 --- a/tests/run/asm.rs +++ b/tests/run/asm.rs @@ -16,6 +16,7 @@ add_asm: ret" ); +#[cfg(target_arch = "x86_64")] extern "C" { fn add_asm(a: i64, b: i64) -> i64; } diff --git a/tests/run/float.rs b/tests/run/float.rs index 424fa1cf4ad53..df555f383fe08 100644 --- a/tests/run/float.rs +++ b/tests/run/float.rs @@ -3,8 +3,6 @@ // Run-time: // status: 0 -#![feature(const_black_box)] - fn main() { use std::hint::black_box; @@ -15,14 +13,14 @@ fn main() { }}; } - check!(i32, (black_box(0.0f32) as i32)); + check!(i32, black_box(0.0f32) as i32); - check!(u64, (black_box(f32::NAN) as u64)); - check!(u128, (black_box(f32::NAN) as u128)); + check!(u64, black_box(f32::NAN) as u64); + check!(u128, black_box(f32::NAN) as u128); - check!(i64, (black_box(f64::NAN) as i64)); - check!(u64, (black_box(f64::NAN) as u64)); + check!(i64, black_box(f64::NAN) as i64); + check!(u64, black_box(f64::NAN) as u64); - check!(i16, (black_box(f32::MIN) as i16)); - check!(i16, (black_box(f32::MAX) as i16)); + check!(i16, black_box(f32::MIN) as i16); + check!(i16, black_box(f32::MAX) as i16); } diff --git a/tests/run/int.rs b/tests/run/int.rs index 47b5dea46f8de..e20ecc23679d6 100644 --- a/tests/run/int.rs +++ b/tests/run/int.rs @@ -3,8 +3,6 @@ // Run-time: // status: 0 -#![feature(const_black_box)] - fn main() { use std::hint::black_box; diff --git a/tests/run/volatile.rs 
b/tests/run/volatile.rs index 8b0433125936b..94a7bdc5c0668 100644 --- a/tests/run/volatile.rs +++ b/tests/run/volatile.rs @@ -5,13 +5,14 @@ use std::mem::MaybeUninit; +#[allow(dead_code)] #[derive(Debug)] struct Struct { pointer: *const (), func: unsafe fn(*const ()), } -fn func(ptr: *const ()) { +fn func(_ptr: *const ()) { } fn main() { diff --git a/tests/run/volatile2.rs b/tests/run/volatile2.rs index a177b817ab35a..bdcb82598789c 100644 --- a/tests/run/volatile2.rs +++ b/tests/run/volatile2.rs @@ -6,8 +6,6 @@ mod libc { #[link(name = "c")] extern "C" { - pub fn puts(s: *const u8) -> i32; - pub fn sigaction(signum: i32, act: *const sigaction, oldact: *mut sigaction) -> i32; pub fn mmap(addr: *mut (), len: usize, prot: i32, flags: i32, fd: i32, offset: i64) -> *mut (); pub fn mprotect(addr: *mut (), len: usize, prot: i32) -> i32; @@ -61,7 +59,7 @@ fn main() { panic!("error: mmap failed"); } - let p_count = (&mut COUNT) as *mut u32; + let p_count = (&raw mut COUNT) as *mut u32; p_count.write_volatile(0); // Trigger segfaults @@ -94,7 +92,7 @@ fn main() { } unsafe extern "C" fn segv_handler(_: i32, _: *mut (), _: *mut ()) { - let p_count = (&mut COUNT) as *mut u32; + let p_count = (&raw mut COUNT) as *mut u32; p_count.write_volatile(p_count.read_volatile() + 1); let count = p_count.read_volatile(); From 4475b1c988aa94ad311fe83de9f0d0c9dbe08cb4 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 18 Jul 2025 22:35:30 +0200 Subject: [PATCH 015/118] Remove forgotten git annotations --- example/mini_core_hello_world.rs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/example/mini_core_hello_world.rs b/example/mini_core_hello_world.rs index 358f265a6b856..85489f850e248 100644 --- a/example/mini_core_hello_world.rs +++ b/example/mini_core_hello_world.rs @@ -198,12 +198,6 @@ fn main() { assert_eq!(intrinsics::align_of::() as u8, 2); assert_eq!(intrinsics::align_of_val(&a) as u8, intrinsics::align_of::<&str>() as u8); -<<<<<<< HEAD - assert!(!const { intrinsics::needs_drop::() }); - assert!(!const { intrinsics::needs_drop::<[u8]>() }); - assert!(const { intrinsics::needs_drop::() }); - assert!(const { intrinsics::needs_drop::() }); -======= /* * TODO: re-enable in the next sync. let u8_needs_drop = const { intrinsics::needs_drop::() }; @@ -215,7 +209,6 @@ fn main() { let noisy_unsized_drop = const { intrinsics::needs_drop::() }; assert!(noisy_unsized_drop); */ ->>>>>>> f682d09eefc6700b9e5851ef193847959acf4fac Unique { pointer: 0 as *const &str, From 8b8ffa8b4e58c2c451a53ae428718edf111922e0 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 4 Jul 2025 07:42:28 +0000 Subject: [PATCH 016/118] Merge modules and cached_modules for fat LTO The modules vec can already contain serialized modules and there is no need to distinguish between cached and non-cached cgus at LTO time. 
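A minimal, self-contained sketch of the single-list flow described above, using simplified stand-ins for the real `FatLtoInput`/`SerializedModule` types (all names and shapes here are illustrative, not the actual rustc_codegen_ssa API): cached modules from a previous incremental build can simply be fed in as `Serialized` inputs alongside freshly codegened ones, so no separate `cached_modules` list is needed at fat-LTO time.

```rust
// Illustrative stand-ins only; the real types live in rustc_codegen_ssa.
struct SerializedModule(Vec<u8>);

enum FatLtoInput {
    // Already-parsed module, e.g. from a multi-CGU build.
    InMemory(String),
    // Serialized bitcode-like buffer; cached CGUs can enter through here too.
    Serialized { name: String, buffer: SerializedModule },
}

fn fat_lto(modules: Vec<FatLtoInput>) {
    let mut in_memory = Vec::new();
    let mut serialized = Vec::new();
    // One input list is enough: the match below sorts inputs by kind,
    // regardless of whether a serialized module came from the cache or not.
    for module in modules {
        match module {
            FatLtoInput::InMemory(m) => in_memory.push(m),
            FatLtoInput::Serialized { name, buffer } => serialized.push((name, buffer)),
        }
    }
    println!("{} in-memory module(s), {} serialized module(s)", in_memory.len(), serialized.len());
}

fn main() {
    fat_lto(vec![
        FatLtoInput::InMemory("cgu_fresh".into()),
        FatLtoInput::Serialized { name: "cgu_cached".into(), buffer: SerializedModule(Vec::new()) },
    ]);
}
```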
--- src/back/lto.rs | 12 ------------ src/lib.rs | 3 +-- 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/src/back/lto.rs b/src/back/lto.rs index e554dd2500bda..9f2842d7abc8f 100644 --- a/src/back/lto.rs +++ b/src/back/lto.rs @@ -175,7 +175,6 @@ fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> { pub(crate) fn run_fat( cgcx: &CodegenContext, modules: Vec>, - cached_modules: Vec<(SerializedModule, WorkProduct)>, ) -> Result, FatalError> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); @@ -186,7 +185,6 @@ pub(crate) fn run_fat( cgcx, dcx, modules, - cached_modules, lto_data.upstream_modules, lto_data.tmp_path, //<o_data.symbols_below_threshold, @@ -197,7 +195,6 @@ fn fat_lto( cgcx: &CodegenContext, _dcx: DiagCtxtHandle<'_>, modules: Vec>, - cached_modules: Vec<(SerializedModule, WorkProduct)>, mut serialized_modules: Vec<(SerializedModule, CString)>, tmp_path: TempDir, //symbols_below_threshold: &[String], @@ -211,21 +208,12 @@ fn fat_lto( // modules that are serialized in-memory. // * `in_memory` contains modules which are already parsed and in-memory, // such as from multi-CGU builds. - // - // All of `cached_modules` (cached from previous incremental builds) can - // immediately go onto the `serialized_modules` modules list and then we can - // split the `modules` array into these two lists. let mut in_memory = Vec::new(); - serialized_modules.extend(cached_modules.into_iter().map(|(buffer, wp)| { - info!("pushing cached module {:?}", wp.cgu_name); - (buffer, CString::new(wp.cgu_name).unwrap()) - })); for module in modules { match module { FatLtoInput::InMemory(m) => in_memory.push(m), FatLtoInput::Serialized { name, buffer } => { info!("pushing serialized module {:?}", name); - let buffer = SerializedModule::Local(buffer); serialized_modules.push((buffer, CString::new(name).unwrap())); } } diff --git a/src/lib.rs b/src/lib.rs index af416929ea73d..3fbbaacf1bbb7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -359,14 +359,13 @@ impl WriteBackendMethods for GccCodegenBackend { fn run_and_optimize_fat_lto( cgcx: &CodegenContext, modules: Vec>, - cached_modules: Vec<(SerializedModule, WorkProduct)>, diff_fncs: Vec, ) -> Result, FatalError> { if !diff_fncs.is_empty() { unimplemented!(); } - back::lto::run_fat(cgcx, modules, cached_modules) + back::lto::run_fat(cgcx, modules) } fn run_thin_lto( From 803ada77a5104911ca972926610d2d4b9f3f511e Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 4 Jul 2025 14:59:53 +0000 Subject: [PATCH 017/118] Move LTO symbol export calculation from backends to cg_ssa --- messages.ftl | 8 ------ src/back/lto.rs | 74 ++++++------------------------------------------- src/errors.rs | 13 --------- 3 files changed, 8 insertions(+), 87 deletions(-) diff --git a/messages.ftl b/messages.ftl index 55a28bc9493e7..a70ac08f01aee 100644 --- a/messages.ftl +++ b/messages.ftl @@ -3,12 +3,4 @@ codegen_gcc_unwinding_inline_asm = codegen_gcc_copy_bitcode = failed to copy bitcode to object file: {$err} -codegen_gcc_dynamic_linking_with_lto = - cannot prefer dynamic linking when performing LTO - .note = only 'staticlib', 'bin', and 'cdylib' outputs are supported with LTO - -codegen_gcc_lto_disallowed = lto can only be run for executables, cdylibs and static library outputs - -codegen_gcc_lto_dylib = lto cannot be used for `dylib` crate type without `-Zdylib-lto` - codegen_gcc_lto_bitcode_from_rlib = failed to get bitcode from object file for LTO ({$gcc_err}) diff --git a/src/back/lto.rs b/src/back/lto.rs 
index 9f2842d7abc8f..e075aa8480aa9 100644 --- a/src/back/lto.rs +++ b/src/back/lto.rs @@ -24,36 +24,24 @@ use std::sync::Arc; use gccjit::{Context, OutputKind}; use object::read::archive::ArchiveFile; -use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule, ThinShared}; -use rustc_codegen_ssa::back::symbol_export; +use rustc_codegen_ssa::back::lto::{ + SerializedModule, ThinModule, ThinShared, exported_symbols_for_lto, +}; use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput}; use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file}; use rustc_data_structures::memmap::Mmap; use rustc_errors::{DiagCtxtHandle, FatalError}; -use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::bug; use rustc_middle::dep_graph::WorkProduct; -use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; -use rustc_session::config::{CrateType, Lto}; +use rustc_session::config::Lto; use rustc_target::spec::RelocModel; use tempfile::{TempDir, tempdir}; use crate::back::write::save_temp_bitcode; -use crate::errors::{DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib}; +use crate::errors::LtoBitcodeFromRlib; use crate::{GccCodegenBackend, GccContext, SyncContext, to_gcc_opt_level}; -pub fn crate_type_allows_lto(crate_type: CrateType) -> bool { - match crate_type { - CrateType::Executable - | CrateType::Dylib - | CrateType::Staticlib - | CrateType::Cdylib - | CrateType::Sdylib => true, - CrateType::Rlib | CrateType::ProcMacro => false, - } -} - struct LtoData { // TODO(antoyo): use symbols_below_threshold. //symbols_below_threshold: Vec, @@ -65,15 +53,8 @@ fn prepare_lto( cgcx: &CodegenContext, dcx: DiagCtxtHandle<'_>, ) -> Result { - let export_threshold = match cgcx.lto { - // We're just doing LTO for our one crate - Lto::ThinLocal => SymbolExportLevel::Rust, - - // We're doing LTO for the entire crate graph - Lto::Fat | Lto::Thin => symbol_export::crates_export_threshold(&cgcx.crate_types), - - Lto::No => panic!("didn't request LTO but we're doing LTO"), - }; + // FIXME(bjorn3): Limit LTO exports to these symbols + let _symbols_below_threshold = exported_symbols_for_lto(cgcx, dcx)?; let tmp_path = match tempdir() { Ok(tmp_path) => tmp_path, @@ -83,20 +64,6 @@ fn prepare_lto( } }; - let symbol_filter = &|&(ref name, info): &(String, SymbolExportInfo)| { - if info.level.is_below_threshold(export_threshold) || info.used { - Some(name.clone()) - } else { - None - } - }; - let exported_symbols = cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO"); - let mut symbols_below_threshold = { - let _timer = cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold"); - exported_symbols[&LOCAL_CRATE].iter().filter_map(symbol_filter).collect::>() - }; - info!("{} symbols to preserve in this crate", symbols_below_threshold.len()); - // If we're performing LTO for the entire crate graph, then for each of our // upstream dependencies, find the corresponding rlib and load the bitcode // from the archive. 
@@ -105,32 +72,7 @@ fn prepare_lto( // with either fat or thin LTO let mut upstream_modules = Vec::new(); if cgcx.lto != Lto::ThinLocal { - // Make sure we actually can run LTO - for crate_type in cgcx.crate_types.iter() { - if !crate_type_allows_lto(*crate_type) { - dcx.emit_err(LtoDisallowed); - return Err(FatalError); - } - if *crate_type == CrateType::Dylib && !cgcx.opts.unstable_opts.dylib_lto { - dcx.emit_err(LtoDylib); - return Err(FatalError); - } - } - - if cgcx.opts.cg.prefer_dynamic && !cgcx.opts.unstable_opts.dylib_lto { - dcx.emit_err(DynamicLinkingWithLTO); - return Err(FatalError); - } - - for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() { - let exported_symbols = - cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO"); - { - let _timer = cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold"); - symbols_below_threshold - .extend(exported_symbols[&cnum].iter().filter_map(symbol_filter)); - } - + for &(_cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() { let archive_data = unsafe { Mmap::map(File::open(path).expect("couldn't open rlib")).expect("couldn't map rlib") }; diff --git a/src/errors.rs b/src/errors.rs index b7e7343460fbf..0aa16bd88b43a 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -14,19 +14,6 @@ pub(crate) struct CopyBitcode { pub err: std::io::Error, } -#[derive(Diagnostic)] -#[diag(codegen_gcc_dynamic_linking_with_lto)] -#[note] -pub(crate) struct DynamicLinkingWithLTO; - -#[derive(Diagnostic)] -#[diag(codegen_gcc_lto_disallowed)] -pub(crate) struct LtoDisallowed; - -#[derive(Diagnostic)] -#[diag(codegen_gcc_lto_dylib)] -pub(crate) struct LtoDylib; - #[derive(Diagnostic)] #[diag(codegen_gcc_lto_bitcode_from_rlib)] pub(crate) struct LtoBitcodeFromRlib { From df5367810bb1af76f82f9dff4a727bee8e54f3bc Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 6 Jul 2025 15:57:20 +0000 Subject: [PATCH 018/118] Merge exported_symbols computation into exported_symbols_for_lto And move exported_symbols_for_lto call from backends to cg_ssa. 
--- src/back/lto.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/back/lto.rs b/src/back/lto.rs index e075aa8480aa9..f957bb7f101ba 100644 --- a/src/back/lto.rs +++ b/src/back/lto.rs @@ -24,9 +24,7 @@ use std::sync::Arc; use gccjit::{Context, OutputKind}; use object::read::archive::ArchiveFile; -use rustc_codegen_ssa::back::lto::{ - SerializedModule, ThinModule, ThinShared, exported_symbols_for_lto, -}; +use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule, ThinShared}; use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput}; use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file}; @@ -54,7 +52,7 @@ fn prepare_lto( dcx: DiagCtxtHandle<'_>, ) -> Result { // FIXME(bjorn3): Limit LTO exports to these symbols - let _symbols_below_threshold = exported_symbols_for_lto(cgcx, dcx)?; + let _symbols_below_threshold = &cgcx.exported_symbols_for_lto; let tmp_path = match tempdir() { Ok(tmp_path) => tmp_path, From ecab766c2cc08d22205c01b6ad6c903389ea480d Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 6 Jul 2025 16:06:10 +0000 Subject: [PATCH 019/118] Move exported_symbols_for_lto out of CodegenContext --- src/back/lto.rs | 3 --- src/lib.rs | 4 ++++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/back/lto.rs b/src/back/lto.rs index f957bb7f101ba..d107e56fa35a9 100644 --- a/src/back/lto.rs +++ b/src/back/lto.rs @@ -51,9 +51,6 @@ fn prepare_lto( cgcx: &CodegenContext, dcx: DiagCtxtHandle<'_>, ) -> Result { - // FIXME(bjorn3): Limit LTO exports to these symbols - let _symbols_below_threshold = &cgcx.exported_symbols_for_lto; - let tmp_path = match tempdir() { Ok(tmp_path) => tmp_path, Err(error) => { diff --git a/src/lib.rs b/src/lib.rs index 3fbbaacf1bbb7..c8bf5bd8f67be 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -358,6 +358,8 @@ impl WriteBackendMethods for GccCodegenBackend { fn run_and_optimize_fat_lto( cgcx: &CodegenContext, + // FIXME(bjorn3): Limit LTO exports to these symbols + _exported_symbols_for_lto: &[String], modules: Vec>, diff_fncs: Vec, ) -> Result, FatalError> { @@ -370,6 +372,8 @@ impl WriteBackendMethods for GccCodegenBackend { fn run_thin_lto( cgcx: &CodegenContext, + // FIXME(bjorn3): Limit LTO exports to these symbols + _exported_symbols_for_lto: &[String], modules: Vec<(String, Self::ThinBuffer)>, cached_modules: Vec<(SerializedModule, WorkProduct)>, ) -> Result<(Vec>, Vec), FatalError> { From 28ccfad3a8478c4bf160d9ab30671eb1a4429590 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 6 Jul 2025 16:59:30 +0000 Subject: [PATCH 020/118] Remove each_linked_rlib_for_lto from CodegenContext --- src/back/lto.rs | 9 ++++++--- src/lib.rs | 7 +++++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/back/lto.rs b/src/back/lto.rs index d107e56fa35a9..d558dfbc1c455 100644 --- a/src/back/lto.rs +++ b/src/back/lto.rs @@ -49,6 +49,7 @@ struct LtoData { fn prepare_lto( cgcx: &CodegenContext, + each_linked_rlib_for_lto: &[PathBuf], dcx: DiagCtxtHandle<'_>, ) -> Result { let tmp_path = match tempdir() { @@ -67,7 +68,7 @@ fn prepare_lto( // with either fat or thin LTO let mut upstream_modules = Vec::new(); if cgcx.lto != Lto::ThinLocal { - for &(_cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() { + for path in each_linked_rlib_for_lto { let archive_data = unsafe { Mmap::map(File::open(path).expect("couldn't open rlib")).expect("couldn't map rlib") }; @@ -111,11 +112,12 
@@ fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> { /// for further optimization. pub(crate) fn run_fat( cgcx: &CodegenContext, + each_linked_rlib_for_lto: &[PathBuf], modules: Vec>, ) -> Result, FatalError> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); - let lto_data = prepare_lto(cgcx, dcx)?; + let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx)?; /*let symbols_below_threshold = lto_data.symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::>();*/ fat_lto( @@ -281,12 +283,13 @@ impl ModuleBufferMethods for ModuleBuffer { /// can simply be copied over from the incr. comp. cache. pub(crate) fn run_thin( cgcx: &CodegenContext, + each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, ThinBuffer)>, cached_modules: Vec<(SerializedModule, WorkProduct)>, ) -> Result<(Vec>, Vec), FatalError> { let dcx = cgcx.create_dcx(); let dcx = dcx.handle(); - let lto_data = prepare_lto(cgcx, dcx)?; + let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx)?; if cgcx.opts.cg.linker_plugin_lto.enabled() { unreachable!( "We should never reach this case if the LTO step \ diff --git a/src/lib.rs b/src/lib.rs index c8bf5bd8f67be..71765c5113811 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -81,6 +81,7 @@ mod type_of; use std::any::Any; use std::fmt::Debug; use std::ops::Deref; +use std::path::PathBuf; #[cfg(not(feature = "master"))] use std::sync::atomic::AtomicBool; #[cfg(not(feature = "master"))] @@ -360,6 +361,7 @@ impl WriteBackendMethods for GccCodegenBackend { cgcx: &CodegenContext, // FIXME(bjorn3): Limit LTO exports to these symbols _exported_symbols_for_lto: &[String], + each_linked_rlib_for_lto: &[PathBuf], modules: Vec>, diff_fncs: Vec, ) -> Result, FatalError> { @@ -367,17 +369,18 @@ impl WriteBackendMethods for GccCodegenBackend { unimplemented!(); } - back::lto::run_fat(cgcx, modules) + back::lto::run_fat(cgcx, each_linked_rlib_for_lto, modules) } fn run_thin_lto( cgcx: &CodegenContext, // FIXME(bjorn3): Limit LTO exports to these symbols _exported_symbols_for_lto: &[String], + each_linked_rlib_for_lto: &[PathBuf], modules: Vec<(String, Self::ThinBuffer)>, cached_modules: Vec<(SerializedModule, WorkProduct)>, ) -> Result<(Vec>, Vec), FatalError> { - back::lto::run_thin(cgcx, modules, cached_modules) + back::lto::run_thin(cgcx, each_linked_rlib_for_lto, modules, cached_modules) } fn print_pass_timings(&self) { From c1be95ca0c2be04060da7a01f6892e5e6f2ef9dc Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Mon, 21 Jul 2025 14:16:43 -0400 Subject: [PATCH 021/118] Add missing inline attribute --- src/allocator.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/allocator.rs b/src/allocator.rs index 0d8dc93274f9b..66258390d9092 100644 --- a/src/allocator.rs +++ b/src/allocator.rs @@ -99,11 +99,14 @@ fn create_const_value_function( let func = context.new_function(None, FunctionType::Exported, output, &[], name, false); #[cfg(feature = "master")] - func.add_attribute(FnAttribute::Visibility(symbol_visibility_to_gcc( - tcx.sess.default_visibility(), - ))); + { + func.add_attribute(FnAttribute::Visibility(symbol_visibility_to_gcc( + tcx.sess.default_visibility(), + ))); - func.add_attribute(FnAttribute::AlwaysInline); + func.add_attribute(FnAttribute::AlwaysInline); + func.add_attribute(FnAttribute::Inline); + } if tcx.sess.must_emit_unwind_tables() { // TODO(antoyo): emit unwind tables. 
From cf80eeec1650add1e24f114841154484fa051b70 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Mon, 21 Jul 2025 14:42:45 -0400 Subject: [PATCH 022/118] Fix clippy warnings --- build_system/src/abi_test.rs | 2 +- build_system/src/fuzz.rs | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/build_system/src/abi_test.rs b/build_system/src/abi_test.rs index 3c1531be27a52..a85886d87f365 100644 --- a/build_system/src/abi_test.rs +++ b/build_system/src/abi_test.rs @@ -31,7 +31,7 @@ pub fn run() -> Result<(), String> { Some("clones/abi-cafe".as_ref()), true, ) - .map_err(|err| (format!("Git clone failed with message: {err:?}!")))?; + .map_err(|err| format!("Git clone failed with message: {err:?}!"))?; // Configure abi-cafe to use the exact same rustc version we use - this is crucial. // Otherwise, the concept of ABI compatibility becomes meanignless. std::fs::copy("rust-toolchain", "clones/abi-cafe/rust-toolchain") diff --git a/build_system/src/fuzz.rs b/build_system/src/fuzz.rs index 453211366b31b..9714ce29af909 100644 --- a/build_system/src/fuzz.rs +++ b/build_system/src/fuzz.rs @@ -43,18 +43,18 @@ pub fn run() -> Result<(), String> { "--start" => { start = str::parse(&args.next().ok_or_else(|| "Fuzz start not provided!".to_string())?) - .map_err(|err| (format!("Fuzz start not a number {err:?}!")))?; + .map_err(|err| format!("Fuzz start not a number {err:?}!"))?; } "--count" => { count = str::parse(&args.next().ok_or_else(|| "Fuzz count not provided!".to_string())?) - .map_err(|err| (format!("Fuzz count not a number {err:?}!")))?; + .map_err(|err| format!("Fuzz count not a number {err:?}!"))?; } "-j" | "--jobs" => { threads = str::parse( &args.next().ok_or_else(|| "Fuzz thread count not provided!".to_string())?, ) - .map_err(|err| (format!("Fuzz thread count not a number {err:?}!")))?; + .map_err(|err| format!("Fuzz thread count not a number {err:?}!"))?; } _ => return Err(format!("Unknown option {arg}")), } @@ -66,7 +66,7 @@ pub fn run() -> Result<(), String> { Some("clones/rustlantis".as_ref()), true, ) - .map_err(|err| (format!("Git clone failed with message: {err:?}!")))?; + .map_err(|err| format!("Git clone failed with message: {err:?}!"))?; // Ensure that we are on the newest rustlantis commit. 
let cmd: &[&dyn AsRef] = &[&"git", &"pull", &"origin"]; From 18cc4f06a5252f0868be4ea620076247da68077c Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Mon, 21 Jul 2025 14:45:00 -0400 Subject: [PATCH 023/118] Fix spelling mistakes --- src/lib.rs | 4 ++-- tools/cspell_dicts/rustc_codegen_gcc.txt | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index af416929ea73d..5e65a36f2737a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -360,9 +360,9 @@ impl WriteBackendMethods for GccCodegenBackend { cgcx: &CodegenContext, modules: Vec>, cached_modules: Vec<(SerializedModule, WorkProduct)>, - diff_fncs: Vec, + diff_functions: Vec, ) -> Result, FatalError> { - if !diff_fncs.is_empty() { + if !diff_functions.is_empty() { unimplemented!(); } diff --git a/tools/cspell_dicts/rustc_codegen_gcc.txt b/tools/cspell_dicts/rustc_codegen_gcc.txt index 31023e50ffa18..b19d7f67eab3b 100644 --- a/tools/cspell_dicts/rustc_codegen_gcc.txt +++ b/tools/cspell_dicts/rustc_codegen_gcc.txt @@ -8,6 +8,7 @@ clzll cmse codegened csky +ctfe ctlz ctpop cttz @@ -47,6 +48,7 @@ mavx mcmodel minimumf minnumf +miri monomorphization monomorphizations monomorphized From af8cb1da142645c1d546e7c9ad66b17b50e16ffe Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Mon, 21 Jul 2025 14:22:51 +0200 Subject: [PATCH 024/118] Rename `tests/assembly` into `tests/assembly-llvm` --- build_system/src/test.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build_system/src/test.rs b/build_system/src/test.rs index cbb0f94938383..bc0fdd40b6e85 100644 --- a/build_system/src/test.rs +++ b/build_system/src/test.rs @@ -588,7 +588,7 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> { &"always", &"--stage", &"0", - &"tests/assembly/asm", + &"tests/assembly-llvm/asm", &"--compiletest-rustc-args", &rustc_args, ], From d466953088a41ce98acfcd53961ef07b887f074a Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:03:49 -0400 Subject: [PATCH 025/118] Fix failing UI tests --- tests/failing-ui-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/failing-ui-tests.txt b/tests/failing-ui-tests.txt index 6979c04d53430..f7040055874eb 100644 --- a/tests/failing-ui-tests.txt +++ b/tests/failing-ui-tests.txt @@ -10,7 +10,7 @@ tests/ui/iterators/iter-sum-overflow-overflow-checks.rs tests/ui/mir/mir_drop_order.rs tests/ui/mir/mir_let_chains_drop_order.rs tests/ui/mir/mir_match_guard_let_chains_drop_order.rs -tests/ui/oom_unwind.rs +tests/ui/panics/oom-panic-unwind.rs tests/ui/panic-runtime/abort-link-to-unwinding-crates.rs tests/ui/panic-runtime/abort.rs tests/ui/panic-runtime/link-to-abort.rs From a4fb5794e60883625fed8d25e9e55c649f9fb70c Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:14:31 -0400 Subject: [PATCH 026/118] Fix compilation of overflow addition --- src/int.rs | 30 +++++++++++++++++++++++++++++- src/lib.rs | 1 + 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/src/int.rs b/src/int.rs index 6f21ce9352b59..53e49e71e2bfd 100644 --- a/src/int.rs +++ b/src/int.rs @@ -4,12 +4,15 @@ // cSpell:words cmpti divti modti mulodi muloti udivti umodti -use gccjit::{BinaryOp, ComparisonOp, FunctionType, Location, RValue, ToRValue, Type, UnaryOp}; +use gccjit::{ + BinaryOp, CType, ComparisonOp, FunctionType, Location, RValue, ToRValue, Type, UnaryOp, +}; use rustc_abi::{CanonAbi, Endian, ExternAbi}; use rustc_codegen_ssa::common::{IntPredicate, TypeKind}; use rustc_codegen_ssa::traits::{BackendTypes, 
BaseTypeCodegenMethods, BuilderMethods, OverflowOp}; use rustc_middle::ty::{self, Ty}; use rustc_target::callconv::{ArgAbi, ArgAttributes, FnAbi, PassMode}; +use rustc_type_ir::{Interner, TyKind}; use crate::builder::{Builder, ToGccComp}; use crate::common::{SignType, TypeReflection}; @@ -351,6 +354,9 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { // TODO(antoyo): is it correct to use rhs type instead of the parameter typ? .new_local(self.location, rhs.get_type(), "binopResult") .get_address(self.location); + let new_type = type_kind_to_gcc_type(new_kind); + let new_type = self.context.new_c_type(new_type); + let lhs = self.context.new_cast(self.location, lhs, new_type); let overflow = self.overflow_call(intrinsic, &[lhs, rhs, res], None); (res.dereference(self.location).to_rvalue(), overflow) } @@ -1042,3 +1048,25 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { self.context.new_array_constructor(None, typ, &values) } } + +fn type_kind_to_gcc_type(kind: TyKind) -> CType { + use rustc_middle::ty::IntTy::*; + use rustc_middle::ty::UintTy::*; + use rustc_middle::ty::{Int, Uint}; + + match kind { + Int(I8) => CType::Int8t, + Int(I16) => CType::Int16t, + Int(I32) => CType::Int32t, + Int(I64) => CType::Int64t, + Int(I128) => CType::Int128t, + + Uint(U8) => CType::UInt8t, + Uint(U16) => CType::UInt16t, + Uint(U32) => CType::UInt32t, + Uint(U64) => CType::UInt64t, + Uint(U128) => CType::UInt128t, + + _ => unimplemented!("Kind: {:?}", kind), + } +} diff --git a/src/lib.rs b/src/lib.rs index 5e65a36f2737a..92808123d6a90 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -51,6 +51,7 @@ extern crate rustc_session; extern crate rustc_span; extern crate rustc_symbol_mangling; extern crate rustc_target; +extern crate rustc_type_ir; // This prevents duplicating functions and statics that are already part of the host rustc process. #[allow(unused_extern_crates)] From 27f3a97747cd46676c4fe2a4afd77009f3b98a46 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:16:06 -0400 Subject: [PATCH 027/118] Use a bitcast in Builder::ret to support non-native integers --- src/builder.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index a4ec4bf8deac4..3cd464b61e142 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -540,8 +540,8 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { fn ret(&mut self, mut value: RValue<'gcc>) { let expected_return_type = self.current_func().get_return_type(); if !expected_return_type.is_compatible_with(value.get_type()) { - // NOTE: due to opaque pointers now being used, we need to cast here. - value = self.context.new_cast(self.location, value, expected_return_type); + // NOTE: due to opaque pointers now being used, we need to bitcast here. 
+ value = self.context.new_bitcast(self.location, value, expected_return_type); } self.llbb().end_with_return(self.location, value); } From a5bd9d6635831b09457ce39b1eacdc30ec306cd4 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:17:43 -0400 Subject: [PATCH 028/118] Fix spelling mistake --- tools/cspell_dicts/rustc_codegen_gcc.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/cspell_dicts/rustc_codegen_gcc.txt b/tools/cspell_dicts/rustc_codegen_gcc.txt index b19d7f67eab3b..4fb018b3ecd87 100644 --- a/tools/cspell_dicts/rustc_codegen_gcc.txt +++ b/tools/cspell_dicts/rustc_codegen_gcc.txt @@ -26,6 +26,7 @@ fwrapv gimple hrtb immediates +interner liblto llbb llcx From 9ea18272eceae790619661b421dcec7fe1f0e41b Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:26:31 -0400 Subject: [PATCH 029/118] Fix sysroot compilation in release mode --- src/int.rs | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/int.rs b/src/int.rs index 53e49e71e2bfd..5180f1e6d3ed1 100644 --- a/src/int.rs +++ b/src/int.rs @@ -170,9 +170,9 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { if a_type.is_vector() { // Vector types need to be bitcast. // TODO(antoyo): perhaps use __builtin_convertvector for vector casting. - b = self.context.new_bitcast(self.location, b, a.get_type()); + b = self.context.new_bitcast(self.location, b, a_type); } else { - b = self.context.new_cast(self.location, b, a.get_type()); + b = self.context.new_cast(self.location, b, a_type); } } self.context.new_binary_op(self.location, operation, a_type, a, b) @@ -219,13 +219,22 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { operation_name: &str, signed: bool, a: RValue<'gcc>, - b: RValue<'gcc>, + mut b: RValue<'gcc>, ) -> RValue<'gcc> { let a_type = a.get_type(); let b_type = b.get_type(); if (self.is_native_int_type_or_bool(a_type) && self.is_native_int_type_or_bool(b_type)) || (a_type.is_vector() && b_type.is_vector()) { + if !a_type.is_compatible_with(b_type) { + if a_type.is_vector() { + // Vector types need to be bitcast. + // TODO(antoyo): perhaps use __builtin_convertvector for vector casting. 
+ b = self.context.new_bitcast(self.location, b, a_type); + } else { + b = self.context.new_cast(self.location, b, a_type); + } + } self.context.new_binary_op(self.location, operation, a_type, a, b) } else { debug_assert!(a_type.dyncast_array().is_some()); @@ -626,7 +635,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { } } - pub fn gcc_xor(&self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> { + pub fn gcc_xor(&self, a: RValue<'gcc>, mut b: RValue<'gcc>) -> RValue<'gcc> { let a_type = a.get_type(); let b_type = b.get_type(); if a_type.is_vector() && b_type.is_vector() { @@ -634,6 +643,9 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { a ^ b } else if self.is_native_int_type_or_bool(a_type) && self.is_native_int_type_or_bool(b_type) { + if !a_type.is_compatible_with(b_type) { + b = self.context.new_cast(self.location, b, a_type); + } a ^ b } else { self.concat_low_high_rvalues( From de5cf685461967da8351a4d54ba96c5da5349eae Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:27:07 -0400 Subject: [PATCH 030/118] Remove failing UI test --- tests/failing-ui-tests.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/failing-ui-tests.txt b/tests/failing-ui-tests.txt index f7040055874eb..9a806e5ba5226 100644 --- a/tests/failing-ui-tests.txt +++ b/tests/failing-ui-tests.txt @@ -14,7 +14,6 @@ tests/ui/panics/oom-panic-unwind.rs tests/ui/panic-runtime/abort-link-to-unwinding-crates.rs tests/ui/panic-runtime/abort.rs tests/ui/panic-runtime/link-to-abort.rs -tests/ui/unwind-no-uwtable.rs tests/ui/parser/unclosed-delimiter-in-dep.rs tests/ui/consts/missing_span_in_backtrace.rs tests/ui/drop/dynamic-drop.rs From ba18e20501d18c05859534b93796c4ca5100ad8f Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:28:44 -0400 Subject: [PATCH 031/118] Remove failing run-make test --- tests/failing-run-make-tests.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/failing-run-make-tests.txt b/tests/failing-run-make-tests.txt index 842533cd3c62c..29032b321fa72 100644 --- a/tests/failing-run-make-tests.txt +++ b/tests/failing-run-make-tests.txt @@ -6,7 +6,6 @@ tests/run-make/doctests-keep-binaries/ tests/run-make/doctests-runtool/ tests/run-make/emit-shared-files/ tests/run-make/exit-code/ -tests/run-make/issue-22131/ tests/run-make/issue-64153/ tests/run-make/llvm-ident/ tests/run-make/native-link-modifier-bundle/ From 041be62e5f365434741195e5870ba355103a44d0 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:44:42 -0400 Subject: [PATCH 032/118] Add failing UI tests --- tests/failing-ui-tests.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/failing-ui-tests.txt b/tests/failing-ui-tests.txt index 9a806e5ba5226..4dc9507f28f15 100644 --- a/tests/failing-ui-tests.txt +++ b/tests/failing-ui-tests.txt @@ -81,3 +81,6 @@ tests/ui/coroutine/panic-drops.rs tests/ui/coroutine/panic-safe.rs tests/ui/process/nofile-limit.rs tests/ui/simd/intrinsic/generic-arithmetic-pass.rs +tests/ui/linking/no-gc-encapsulation-symbols.rs +tests/ui/panics/unwind-force-no-unwind-tables.rs +tests/ui/attributes/fn-align-dyn.rs From 3c35d9b3eb46e6fa5645f93a6d04d13374f58b67 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 11:44:52 -0400 Subject: [PATCH 033/118] Add missing cast in gcc_checked_binop --- src/int.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/int.rs b/src/int.rs index 5180f1e6d3ed1..64b612553cfc7 100644 --- a/src/int.rs +++ b/src/int.rs @@ -366,6 +366,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { let new_type 
= type_kind_to_gcc_type(new_kind); let new_type = self.context.new_c_type(new_type); let lhs = self.context.new_cast(self.location, lhs, new_type); + let rhs = self.context.new_cast(self.location, rhs, new_type); let overflow = self.overflow_call(intrinsic, &[lhs, rhs, res], None); (res.dereference(self.location).to_rvalue(), overflow) } From d3a61f88e87b69574b691405024eacaa0d134518 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 13:06:52 -0400 Subject: [PATCH 034/118] Remove failing UI test --- tests/failing-lto-tests.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/failing-lto-tests.txt b/tests/failing-lto-tests.txt index b9126fb73a775..b1ae1e91078be 100644 --- a/tests/failing-lto-tests.txt +++ b/tests/failing-lto-tests.txt @@ -28,6 +28,5 @@ tests/ui/macros/macro-comma-behavior-rpass.rs tests/ui/macros/rfc-2011-nicer-assert-messages/assert-with-custom-errors-does-not-create-unnecessary-code.rs tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs tests/ui/macros/stringify.rs -tests/ui/reexport-test-harness-main.rs tests/ui/rfcs/rfc-1937-termination-trait/termination-trait-in-test.rs tests/ui/binding/fn-arg-incomplete-pattern-drop-order.rs From d05542af7a07de7e58dc9a695125a3b50146ea82 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Tue, 22 Jul 2025 13:07:12 -0400 Subject: [PATCH 035/118] Add missing cast in gcc_checked_binop --- src/int.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/int.rs b/src/int.rs index 64b612553cfc7..9dc1fcf5fc8bd 100644 --- a/src/int.rs +++ b/src/int.rs @@ -367,6 +367,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { let new_type = self.context.new_c_type(new_type); let lhs = self.context.new_cast(self.location, lhs, new_type); let rhs = self.context.new_cast(self.location, rhs, new_type); + let res = self.context.new_cast(self.location, res, new_type.make_pointer()); let overflow = self.overflow_call(intrinsic, &[lhs, rhs, res], None); (res.dereference(self.location).to_rvalue(), overflow) } From 2b640216aea9527c67c82cfc781d979247ac6028 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Wed, 23 Jul 2025 09:56:54 -0400 Subject: [PATCH 036/118] Fix gcc_icmp with non-native integers --- src/int.rs | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/src/int.rs b/src/int.rs index 9dc1fcf5fc8bd..9fb7f6bad6844 100644 --- a/src/int.rs +++ b/src/int.rs @@ -494,11 +494,27 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { let lhs_low = self.context.new_cast(self.location, self.low(lhs), unsigned_type); let rhs_low = self.context.new_cast(self.location, self.low(rhs), unsigned_type); + let mut lhs_high = self.high(lhs); + let mut rhs_high = self.high(rhs); + + match op { + IntPredicate::IntUGT + | IntPredicate::IntUGE + | IntPredicate::IntULT + | IntPredicate::IntULE => { + lhs_high = self.context.new_cast(self.location, lhs_high, unsigned_type); + rhs_high = self.context.new_cast(self.location, rhs_high, unsigned_type); + } + // TODO(antoyo): we probably need to handle signed comparison for unsigned + // integers. 
+ _ => (), + } + let condition = self.context.new_comparison( self.location, ComparisonOp::LessThan, - self.high(lhs), - self.high(rhs), + lhs_high, + rhs_high, ); self.llbb().end_with_conditional(self.location, condition, block1, block2); @@ -512,8 +528,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { let condition = self.context.new_comparison( self.location, ComparisonOp::GreaterThan, - self.high(lhs), - self.high(rhs), + lhs_high, + rhs_high, ); block2.end_with_conditional(self.location, condition, block3, block4); From 35404461e29343ec61eb79ca8408ae31f8941452 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Fri, 25 Jul 2025 10:57:38 +0800 Subject: [PATCH 037/118] Fix gen panics doc template for debug_assert And add assert_eq, assert_ne, assert_matches support Input: ```rust pub fn $0foo(x: bool) { debug_assert!(x); } ``` Old: ```rust /// . /// /// # Panics /// /// Panics if . pub fn foo(x: bool) { debug_assert!(x); } ``` This PR fixes: ```rust /// . pub fn foo(x: bool) { debug_assert!(x); } ``` --- .../generate_documentation_template.rs | 68 +++++++++++++++++-- 1 file changed, 62 insertions(+), 6 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs index d4d1b3490cb64..68587f0cb5bc5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -313,12 +313,28 @@ fn crate_name(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option { /// `None` if function without a body; some bool to guess if function can panic fn can_panic(ast_func: &ast::Fn) -> Option { let body = ast_func.body()?.to_string(); - let can_panic = body.contains("panic!(") - // FIXME it would be better to not match `debug_assert*!` macro invocations - || body.contains("assert!(") - || body.contains(".unwrap()") - || body.contains(".expect("); - Some(can_panic) + let mut iter = body.chars(); + let assert_postfix = |s| { + ["!(", "_eq!(", "_ne!(", "_matches!("].iter().any(|postfix| str::starts_with(s, postfix)) + }; + + while !iter.as_str().is_empty() { + let s = iter.as_str(); + iter.next(); + if s.strip_prefix("debug_assert").is_some_and(assert_postfix) { + iter.nth(10); + continue; + } + if s.strip_prefix("assert").is_some_and(assert_postfix) + || s.starts_with("panic!(") + || s.starts_with(".unwrap()") + || s.starts_with(".expect(") + { + return Some(true); + } + } + + Some(false) } /// Helper function to get the name that should be given to `self` arguments @@ -677,6 +693,24 @@ pub fn panics_if(a: bool) { ); } + #[test] + fn guesses_debug_assert_macro_cannot_panic() { + check_assist( + generate_documentation_template, + r#" +pub fn $0debug_panics_if_not(a: bool) { + debug_assert!(a == true); +} +"#, + r#" +/// . +pub fn debug_panics_if_not(a: bool) { + debug_assert!(a == true); +} +"#, + ); + } + #[test] fn guesses_assert_macro_can_panic() { check_assist( @@ -699,6 +733,28 @@ pub fn panics_if_not(a: bool) { ); } + #[test] + fn guesses_assert_eq_macro_can_panic() { + check_assist( + generate_documentation_template, + r#" +pub fn $0panics_if_not(a: bool) { + assert_eq!(a, true); +} +"#, + r#" +/// . +/// +/// # Panics +/// +/// Panics if . 
+pub fn panics_if_not(a: bool) { + assert_eq!(a, true); +} +"#, + ); + } + #[test] fn guesses_unwrap_can_panic() { check_assist( From d852f7cb123cde2f0ed12ef09ef3bf58de391a4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joel=20Wejdenst=C3=A5l?= Date: Fri, 25 Jul 2025 21:21:42 +0200 Subject: [PATCH 038/118] Implement support for explicit tail calls in the MIR block builders and the LLVM codegen backend. --- messages.ftl | 2 ++ src/builder.rs | 15 +++++++++++++++ src/errors.rs | 4 ++++ 3 files changed, 21 insertions(+) diff --git a/messages.ftl b/messages.ftl index a70ac08f01aee..b9b77b7d18c66 100644 --- a/messages.ftl +++ b/messages.ftl @@ -4,3 +4,5 @@ codegen_gcc_unwinding_inline_asm = codegen_gcc_copy_bitcode = failed to copy bitcode to object file: {$err} codegen_gcc_lto_bitcode_from_rlib = failed to get bitcode from object file for LTO ({$gcc_err}) + +codegen_gcc_explicit_tail_calls_unsupported = explicit tail calls with the 'become' keyword are not implemented in the GCC backend diff --git a/src/builder.rs b/src/builder.rs index a4ec4bf8deac4..4aee211e2efad 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -34,6 +34,7 @@ use rustc_target::spec::{HasTargetSpec, HasX86AbiOpt, Target, X86Abi}; use crate::common::{SignType, TypeReflection, type_is_pointer}; use crate::context::CodegenCx; +use crate::errors; use crate::intrinsic::llvm; use crate::type_of::LayoutGccExt; @@ -1742,6 +1743,20 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { call } + fn tail_call( + &mut self, + _llty: Self::Type, + _fn_attrs: Option<&CodegenFnAttrs>, + _fn_abi: &FnAbi<'tcx, Ty<'tcx>>, + _llfn: Self::Value, + _args: &[Self::Value], + _funclet: Option<&Self::Funclet>, + _instance: Option>, + ) { + // FIXME: implement support for explicit tail calls like rustc_codegen_llvm. + self.tcx.dcx().emit_fatal(errors::ExplicitTailCallsUnsupported); + } + fn zext(&mut self, value: RValue<'gcc>, dest_typ: Type<'gcc>) -> RValue<'gcc> { // FIXME(antoyo): this does not zero-extend. self.gcc_int_cast(value, dest_typ) diff --git a/src/errors.rs b/src/errors.rs index 0aa16bd88b43a..b252c39c0c05c 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -19,3 +19,7 @@ pub(crate) struct CopyBitcode { pub(crate) struct LtoBitcodeFromRlib { pub gcc_err: String, } + +#[derive(Diagnostic)] +#[diag(codegen_gcc_explicit_tail_calls_unsupported)] +pub(crate) struct ExplicitTailCallsUnsupported; From 013b3eb8055d730f152e9a26df0317ef16f0f86b Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 6 Jul 2025 18:03:07 +0000 Subject: [PATCH 039/118] Remove support for -Zcombine-cgu Nobody seems to actually use this, while still adding some extra complexity to the already rather complex codegen coordinator code. It is also not supported by any backend other than the LLVM backend. 
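For illustration only, a minimal, self-contained sketch of the kind of hook this removes. The names (`Module`, `LinkError`, `WriteBackend`) are made-up stand-ins rather than the real `rustc_codegen_ssa` types and signatures; the point is just that a backend which cannot merge already-codegened modules has nothing useful to implement here:

```rust
// Illustrative sketch only: `Module`, `LinkError`, and `WriteBackend` are
// simplified stand-ins, not the actual rustc_codegen_ssa API.
#[derive(Debug)]
struct Module {
    name: String,
}

#[derive(Debug)]
struct LinkError(String);

trait WriteBackend {
    // Merge all already-codegened modules into a single one -- the operation
    // that `-Zcombine-cgu` asked the codegen coordinator to perform.
    fn run_link(&self, modules: Vec<Module>) -> Result<Module, LinkError>;
}

struct GccLikeBackend;

impl WriteBackend for GccLikeBackend {
    fn run_link(&self, _modules: Vec<Module>) -> Result<Module, LinkError> {
        // A backend without module-merging support can only report an error,
        // so carrying the hook around added complexity for no benefit.
        Err(LinkError("combining codegen units is not supported by this backend".into()))
    }
}

fn main() {
    let backend = GccLikeBackend;
    let modules = vec![Module { name: "cgu.0".into() }, Module { name: "cgu.1".into() }];
    match backend.run_link(modules) {
        Ok(module) => println!("linked into {}", module.name),
        Err(LinkError(msg)) => println!("error: {msg}"),
    }
}
```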
--- src/back/write.rs | 9 --------- src/lib.rs | 8 -------- 2 files changed, 17 deletions(-) diff --git a/src/back/write.rs b/src/back/write.rs index 113abe70805b0..c1231142c6585 100644 --- a/src/back/write.rs +++ b/src/back/write.rs @@ -4,7 +4,6 @@ use gccjit::{Context, OutputKind}; use rustc_codegen_ssa::back::link::ensure_removed; use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, ModuleConfig}; use rustc_codegen_ssa::{CompiledModule, ModuleCodegen}; -use rustc_errors::DiagCtxtHandle; use rustc_fs_util::link_or_copy; use rustc_session::config::OutputType; use rustc_span::fatal_error::FatalError; @@ -258,14 +257,6 @@ pub(crate) fn codegen( )) } -pub(crate) fn link( - _cgcx: &CodegenContext, - _dcx: DiagCtxtHandle<'_>, - mut _modules: Vec>, -) -> Result, FatalError> { - unimplemented!(); -} - pub(crate) fn save_temp_bitcode( cgcx: &CodegenContext, _module: &ModuleCodegen, diff --git a/src/lib.rs b/src/lib.rs index 71765c5113811..a312068250073 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -426,14 +426,6 @@ impl WriteBackendMethods for GccCodegenBackend { fn serialize_module(_module: ModuleCodegen) -> (String, Self::ModuleBuffer) { unimplemented!(); } - - fn run_link( - cgcx: &CodegenContext, - dcx: DiagCtxtHandle<'_>, - modules: Vec>, - ) -> Result, FatalError> { - back::write::link(cgcx, dcx, modules) - } } /// This is the entrypoint for a hot plugged rustc_codegen_gccjit From 1d5a582ba03f2ecda47a2c904bedb17adf5ee2dd Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Sun, 27 Jul 2025 21:53:35 +0900 Subject: [PATCH 040/118] refactor: conpare text of name_ref instead of syntax name_ref --- .../crates/ide-assists/src/handlers/inline_type_alias.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs index 4511072b041b1..f667d6259354b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -322,7 +322,10 @@ fn create_replacement( replacements.push((syntax.clone(), new_lifetime.syntax().clone_for_update())); } - } else if let Some(replacement_syntax) = const_and_type_map.0.get(syntax_str) { + } else if let Some(name_ref) = ast::NameRef::cast(syntax.clone()) { + let Some(replacement_syntax) = const_and_type_map.0.get(&name_ref.to_string()) else { + continue; + }; let new_string = replacement_syntax.to_string(); let new = if new_string == "_" { make::wildcard_pat().syntax().clone_for_update() From b3ea82f2eb0a5d9b895111b8b1eb165cd4da58a3 Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Sun, 27 Jul 2025 22:04:25 +0900 Subject: [PATCH 041/118] Migrate `inline_type_alias` assist to use `SyntaxEditor` --- .../src/handlers/inline_type_alias.rs | 63 ++++++++++--------- 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs index f667d6259354b..62535531435d8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -9,10 +9,10 @@ use ide_db::{ search::FileReference, }; use itertools::Itertools; +use syntax::syntax_editor::SyntaxEditor; use syntax::{ AstNode, 
NodeOrToken, SyntaxNode, ast::{self, HasGenericParams, HasName, make}, - ted, }; use crate::{ @@ -68,37 +68,41 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) let mut definition_deleted = false; let mut inline_refs_for_file = |file_id, refs: Vec| { - builder.edit_file(file_id); + let source = ctx.sema.parse(file_id); + let mut editor = builder.make_editor(source.syntax()); let (path_types, path_type_uses) = split_refs_and_uses(builder, refs, |path_type| { path_type.syntax().ancestors().nth(3).and_then(ast::PathType::cast) }); - path_type_uses .iter() .flat_map(ast_to_remove_for_path_in_use_stmt) - .for_each(|x| builder.delete(x.syntax().text_range())); + .for_each(|x| editor.delete(x.syntax())); + for (target, replacement) in path_types.into_iter().filter_map(|path_type| { - let replacement = inline(&ast_alias, &path_type)?.to_text(&concrete_type); - let target = path_type.syntax().text_range(); + let replacement = + inline(&ast_alias, &path_type)?.replace_generic(&concrete_type); + let target = path_type.syntax().clone(); Some((target, replacement)) }) { - builder.replace(target, replacement); + editor.replace(target, replacement); } - if file_id == ctx.vfs_file_id() { - builder.delete(ast_alias.syntax().text_range()); + if file_id.file_id(ctx.db()) == ctx.vfs_file_id() { + editor.delete(ast_alias.syntax()); definition_deleted = true; } + builder.add_file_edits(file_id.file_id(ctx.db()), editor); }; for (file_id, refs) in usages.into_iter() { - inline_refs_for_file(file_id.file_id(ctx.db()), refs); + inline_refs_for_file(file_id, refs); } if !definition_deleted { - builder.edit_file(ctx.vfs_file_id()); - builder.delete(ast_alias.syntax().text_range()); + let mut editor = builder.make_editor(ast_alias.syntax()); + editor.delete(ast_alias.syntax()); + builder.add_file_edits(ctx.vfs_file_id(), editor) } }, ) @@ -146,23 +150,26 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O } } - let target = alias_instance.syntax().text_range(); - acc.add( AssistId::refactor_inline("inline_type_alias"), "Inline type alias", - target, - |builder| builder.replace(target, replacement.to_text(&concrete_type)), + alias_instance.syntax().text_range(), + |builder| { + let mut editor = builder.make_editor(alias_instance.syntax()); + let replace = replacement.replace_generic(&concrete_type); + editor.replace(alias_instance.syntax(), replace); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, ) } impl Replacement { - fn to_text(&self, concrete_type: &ast::Type) -> String { + fn replace_generic(&self, concrete_type: &ast::Type) -> SyntaxNode { match self { Replacement::Generic { lifetime_map, const_and_type_map } => { create_replacement(lifetime_map, const_and_type_map, concrete_type) } - Replacement::Plain => concrete_type.to_string(), + Replacement::Plain => concrete_type.syntax().clone_subtree().clone_for_update(), } } } @@ -299,15 +306,14 @@ fn create_replacement( lifetime_map: &LifetimeMap, const_and_type_map: &ConstAndTypeMap, concrete_type: &ast::Type, -) -> String { - let updated_concrete_type = concrete_type.clone_for_update(); - let mut replacements = Vec::new(); - let mut removals = Vec::new(); +) -> SyntaxNode { + let updated_concrete_type = concrete_type.syntax().clone_subtree(); + let mut editor = SyntaxEditor::new(updated_concrete_type.clone()); - for syntax in updated_concrete_type.syntax().descendants() { - let syntax_string = syntax.to_string(); - let syntax_str = syntax_string.as_str(); + let mut replacements: 
Vec<(SyntaxNode, SyntaxNode)> = Vec::new(); + let mut removals: Vec> = Vec::new(); + for syntax in updated_concrete_type.descendants() { if let Some(old_lifetime) = ast::Lifetime::cast(syntax.clone()) { if let Some(new_lifetime) = lifetime_map.0.get(&old_lifetime.to_string()) { if new_lifetime.text() == "'_" { @@ -338,14 +344,13 @@ fn create_replacement( } for (old, new) in replacements { - ted::replace(old, new); + editor.replace(old, new); } for syntax in removals { - ted::remove(syntax); + editor.delete(syntax); } - - updated_concrete_type.to_string() + editor.finish().new_root().clone() } fn get_type_alias(ctx: &AssistContext<'_>, path: &ast::PathType) -> Option { From b1b7077b42668ebee51c28eac86786567711ab91 Mon Sep 17 00:00:00 2001 From: Hayashi Mikihiro <34ttrweoewiwe28@gmail.com> Date: Sat, 26 Jul 2025 22:31:41 +0900 Subject: [PATCH 042/118] remove ted from convert_tuple_struct_to_named_struct Signed-off-by: Hayashi Mikihiro <34ttrweoewiwe28@gmail.com> --- .../handlers/convert_tuple_struct_to_named_struct.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 80756197fb700..44361560f4864 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -3,7 +3,8 @@ use ide_db::defs::{Definition, NameRefClass}; use syntax::{ SyntaxKind, SyntaxNode, ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, - match_ast, ted, + match_ast, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; @@ -93,12 +94,13 @@ fn edit_struct_def( names: Vec, ) { let record_fields = tuple_fields.fields().zip(names).filter_map(|(f, name)| { - let field = ast::make::record_field(f.visibility(), name, f.ty()?).clone_for_update(); - ted::insert_all( - ted::Position::first_child_of(field.syntax()), + let field = ast::make::record_field(f.visibility(), name, f.ty()?); + let mut editor = SyntaxEditor::new(field.syntax().clone()); + editor.insert_all( + Position::first_child_of(field.syntax()), f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(), ); - Some(field) + ast::RecordField::cast(editor.finish().new_root().clone()) }); let record_fields = ast::make::record_field_list(record_fields); let tuple_fields_text_range = tuple_fields.syntax().text_range(); From 85486a7bff683fd9ec241a92ca75f38659b4e76c Mon Sep 17 00:00:00 2001 From: Hayashi Mikihiro <34ttrweoewiwe28@gmail.com> Date: Sat, 26 Jul 2025 23:20:31 +0900 Subject: [PATCH 043/118] migrate `fn edit_struct_def` in `convert_tuple_struct_to_named_struct` to SyntaxEditor Signed-off-by: Hayashi Mikihiro <34ttrweoewiwe28@gmail.com> --- .../convert_tuple_struct_to_named_struct.rs | 54 +++++++++---------- 1 file changed, 24 insertions(+), 30 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 44361560f4864..6d729c9ea995e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -1,10 +1,10 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ - SyntaxKind, SyntaxNode, + SyntaxKind, SyntaxNode, T, ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, match_ast, - syntax_editor::{Position, SyntaxEditor}, + syntax_editor::{Element, Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; @@ -72,7 +72,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct( Either::Right(v) => Either::Right(ctx.sema.to_def(v)?), }; let target = strukt_or_variant.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range(); - + let syntax = strukt_or_variant.as_ref().either(|s| s.syntax(), |v| v.syntax()); acc.add( AssistId::refactor_rewrite("convert_tuple_struct_to_named_struct"), "Convert to named struct", @@ -81,58 +81,53 @@ pub(crate) fn convert_tuple_struct_to_named_struct( let names = generate_names(tuple_fields.fields()); edit_field_references(ctx, edit, tuple_fields.fields(), &names); edit_struct_references(ctx, edit, strukt_def, &names); - edit_struct_def(ctx, edit, &strukt_or_variant, tuple_fields, names); + let mut editor = edit.make_editor(syntax); + edit_struct_def(&mut editor, &strukt_or_variant, tuple_fields, names); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } fn edit_struct_def( - ctx: &AssistContext<'_>, - edit: &mut SourceChangeBuilder, + editor: &mut SyntaxEditor, strukt: &Either, tuple_fields: ast::TupleFieldList, names: Vec, ) { let record_fields = tuple_fields.fields().zip(names).filter_map(|(f, name)| { let field = ast::make::record_field(f.visibility(), name, f.ty()?); - let mut editor = SyntaxEditor::new(field.syntax().clone()); - editor.insert_all( + let mut field_editor = SyntaxEditor::new(field.syntax().clone()); + field_editor.insert_all( Position::first_child_of(field.syntax()), f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(), ); - ast::RecordField::cast(editor.finish().new_root().clone()) + ast::RecordField::cast(field_editor.finish().new_root().clone()) }); - let record_fields = ast::make::record_field_list(record_fields); - let tuple_fields_text_range = tuple_fields.syntax().text_range(); - - edit.edit_file(ctx.vfs_file_id()); + let record_fields = ast::make::record_field_list(record_fields).clone_for_update(); + let tuple_fields_before = Position::before(tuple_fields.syntax()); if let Either::Left(strukt) = strukt { if let Some(w) = strukt.where_clause() { - edit.delete(w.syntax().text_range()); - edit.insert( - tuple_fields_text_range.start(), - ast::make::tokens::single_newline().text(), - ); - edit.insert(tuple_fields_text_range.start(), w.syntax().text()); + editor.delete(w.syntax()); + let mut insert_element = Vec::new(); + insert_element.push(ast::make::tokens::single_newline().syntax_element()); + insert_element.push(w.syntax().clone_for_update().syntax_element()); if w.syntax().last_token().is_none_or(|t| t.kind() != SyntaxKind::COMMA) { - edit.insert(tuple_fields_text_range.start(), ","); + insert_element.push(ast::make::token(T![,]).into()); } - edit.insert( - tuple_fields_text_range.start(), - ast::make::tokens::single_newline().text(), - ); + insert_element.push(ast::make::tokens::single_newline().syntax_element()); + editor.insert_all(tuple_fields_before, insert_element); } else { - edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text()); + 
editor.insert(tuple_fields_before, ast::make::tokens::single_space()); } if let Some(t) = strukt.semicolon_token() { - edit.delete(t.text_range()); + editor.delete(t); } } else { - edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text()); + editor.insert(tuple_fields_before, ast::make::tokens::single_space()); } - edit.replace(tuple_fields_text_range, record_fields.to_string()); + editor.replace(tuple_fields.syntax(), record_fields.syntax()); } fn edit_struct_references( @@ -1015,8 +1010,7 @@ where pub struct $0Foo(#[my_custom_attr] u32); "#, r#" -pub struct Foo { #[my_custom_attr] -field1: u32 } +pub struct Foo { #[my_custom_attr]field1: u32 } "#, ); } From 8f898729220258b58d8044d4adaff14d45288fe6 Mon Sep 17 00:00:00 2001 From: Hayashi Mikihiro <34ttrweoewiwe28@gmail.com> Date: Sun, 27 Jul 2025 13:24:08 +0900 Subject: [PATCH 044/118] Migrate `convert_tuple_struct_to_named_struct' assist to use `SyntaxEditor' Signed-off-by: Hayashi Mikihiro <34ttrweoewiwe28@gmail.com> --- .../convert_tuple_struct_to_named_struct.rs | 103 ++++++++++++------ 1 file changed, 68 insertions(+), 35 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 6d729c9ea995e..f4041f49419a7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -1,7 +1,9 @@ use either::Either; +use hir::FileRangeWrapper; use ide_db::defs::{Definition, NameRefClass}; +use std::ops::RangeInclusive; use syntax::{ - SyntaxKind, SyntaxNode, T, + SyntaxElement, SyntaxKind, SyntaxNode, T, TextSize, ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, match_ast, syntax_editor::{Element, Position, SyntaxEditor}, @@ -80,8 +82,8 @@ pub(crate) fn convert_tuple_struct_to_named_struct( |edit| { let names = generate_names(tuple_fields.fields()); edit_field_references(ctx, edit, tuple_fields.fields(), &names); - edit_struct_references(ctx, edit, strukt_def, &names); let mut editor = edit.make_editor(syntax); + edit_struct_references(ctx, edit, strukt_def, &names); edit_struct_def(&mut editor, &strukt_or_variant, tuple_fields, names); edit.add_file_edits(ctx.vfs_file_id(), editor); }, @@ -142,27 +144,21 @@ fn edit_struct_references( }; let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); - let edit_node = |edit: &mut SourceChangeBuilder, node: SyntaxNode| -> Option<()> { + let edit_node = |node: SyntaxNode| -> Option { match_ast! 
{ match node { ast::TupleStructPat(tuple_struct_pat) => { - let file_range = ctx.sema.original_range_opt(&node)?; - edit.edit_file(file_range.file_id.file_id(ctx.db())); - edit.replace( - file_range.range, - ast::make::record_pat_with_fields( - tuple_struct_pat.path()?, - ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map( - |(pat, name)| { - ast::make::record_pat_field( - ast::make::name_ref(&name.to_string()), - pat, - ) - }, - ), None), - ) - .to_string(), - ); + Some(ast::make::record_pat_with_fields( + tuple_struct_pat.path()?, + ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map( + |(pat, name)| { + ast::make::record_pat_field( + ast::make::name_ref(&name.to_string()), + pat, + ) + }, + ), None), + ).syntax().clone_for_update()) }, // for tuple struct creations like Foo(42) ast::CallExpr(call_expr) => { @@ -179,8 +175,7 @@ fn edit_struct_references( let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?; - edit.replace( - ctx.sema.original_range(&node).range, + Some( ast::make::record_expr( path, ast::make::record_expr_field_list(arg_list.args().zip(names).map( @@ -191,25 +186,58 @@ fn edit_struct_references( ) }, )), - ) - .to_string(), - ); + ).syntax().clone_for_update() + ) }, _ => return None, } } - Some(()) }; for (file_id, refs) in usages { - edit.edit_file(file_id.file_id(ctx.db())); - for r in refs { - for node in r.name.syntax().ancestors() { - if edit_node(edit, node).is_some() { - break; + let source = ctx.sema.parse(file_id); + let source = source.syntax(); + + let mut editor = edit.make_editor(source); + for r in refs.iter().rev() { + if let Some((old_node, new_node)) = r + .name + .syntax() + .ancestors() + .find_map(|node| Some((node.clone(), edit_node(node.clone())?))) + { + if let Some(old_node) = ctx.sema.original_syntax_node_rooted(&old_node) { + editor.replace(old_node, new_node); + } else { + let FileRangeWrapper { file_id: _, range } = ctx.sema.original_range(&old_node); + let parent = source.covering_element(range); + match parent { + SyntaxElement::Token(token) => { + editor.replace(token, new_node.syntax_element()); + } + SyntaxElement::Node(parent_node) => { + // replace the part of macro + // ``` + // foo!(a, Test::A(0)); + // ^^^^^^^^^^^^^^^ // parent_node + // ^^^^^^^^^^ // replace_range + // ``` + let start = parent_node + .children_with_tokens() + .find(|t| t.text_range().contains(range.start())); + let end = parent_node + .children_with_tokens() + .find(|t| t.text_range().contains(range.end() - TextSize::new(1))); + if let (Some(start), Some(end)) = (start, end) { + let replace_range = RangeInclusive::new(start, end); + editor.replace_all(replace_range, vec![new_node.into()]); + } + } + } } } } + edit.add_file_edits(file_id.file_id(ctx.db()), editor); } } @@ -227,12 +255,17 @@ fn edit_field_references( let def = Definition::Field(field); let usages = def.usages(&ctx.sema).all(); for (file_id, refs) in usages { - edit.edit_file(file_id.file_id(ctx.db())); + let source = ctx.sema.parse(file_id); + let source = source.syntax(); + let mut editor = edit.make_editor(source); for r in refs { - if let Some(name_ref) = r.name.as_name_ref() { - edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text()); + if let Some(name_ref) = r.name.as_name_ref() + && let Some(original) = ctx.sema.original_ast_node(name_ref.clone()) + { + editor.replace(original.syntax(), name.syntax()); } } + edit.add_file_edits(file_id.file_id(ctx.db()), editor); } } } @@ -242,7 +275,7 @@ fn 
generate_names(fields: impl Iterator) -> Vec Date: Thu, 17 Jul 2025 06:09:15 +0200 Subject: [PATCH 045/118] avoid the need to specify if toc should be generated - this removes one external dependency, mdbook-toc - this steals code from rustc book --- .../rustc-dev-guide/.github/workflows/ci.yml | 4 +- src/doc/rustc-dev-guide/README.md | 6 +- src/doc/rustc-dev-guide/book.toml | 11 +- src/doc/rustc-dev-guide/pagetoc.css | 84 ++++++++++++++ src/doc/rustc-dev-guide/pagetoc.js | 104 ++++++++++++++++++ 5 files changed, 198 insertions(+), 11 deletions(-) create mode 100644 src/doc/rustc-dev-guide/pagetoc.css create mode 100644 src/doc/rustc-dev-guide/pagetoc.js diff --git a/src/doc/rustc-dev-guide/.github/workflows/ci.yml b/src/doc/rustc-dev-guide/.github/workflows/ci.yml index daf5223cbd4ac..6eabb999fb013 100644 --- a/src/doc/rustc-dev-guide/.github/workflows/ci.yml +++ b/src/doc/rustc-dev-guide/.github/workflows/ci.yml @@ -17,7 +17,6 @@ jobs: MDBOOK_VERSION: 0.4.48 MDBOOK_LINKCHECK2_VERSION: 0.9.1 MDBOOK_MERMAID_VERSION: 0.12.6 - MDBOOK_TOC_VERSION: 0.11.2 MDBOOK_OUTPUT__LINKCHECK__FOLLOW_WEB_LINKS: ${{ github.event_name != 'pull_request' }} DEPLOY_DIR: book/html BASE_SHA: ${{ github.event.pull_request.base.sha }} @@ -34,7 +33,7 @@ jobs: with: path: | ~/.cargo/bin - key: ${{ runner.os }}-${{ env.MDBOOK_VERSION }}--${{ env.MDBOOK_LINKCHECK2_VERSION }}--${{ env.MDBOOK_TOC_VERSION }}--${{ env.MDBOOK_MERMAID_VERSION }} + key: ${{ runner.os }}-${{ env.MDBOOK_VERSION }}--${{ env.MDBOOK_LINKCHECK2_VERSION }}--${{ env.MDBOOK_MERMAID_VERSION }} - name: Restore cached Linkcheck if: github.event_name == 'schedule' @@ -57,7 +56,6 @@ jobs: run: | cargo install mdbook --version ${{ env.MDBOOK_VERSION }} cargo install mdbook-linkcheck2 --version ${{ env.MDBOOK_LINKCHECK2_VERSION }} - cargo install mdbook-toc --version ${{ env.MDBOOK_TOC_VERSION }} cargo install mdbook-mermaid --version ${{ env.MDBOOK_MERMAID_VERSION }} - name: Check build diff --git a/src/doc/rustc-dev-guide/README.md b/src/doc/rustc-dev-guide/README.md index 5932da467ab2d..1ad895aeda2e1 100644 --- a/src/doc/rustc-dev-guide/README.md +++ b/src/doc/rustc-dev-guide/README.md @@ -43,7 +43,7 @@ rustdocs][rustdocs]. To build a local static HTML site, install [`mdbook`](https://github.com/rust-lang/mdBook) with: ``` -cargo install mdbook mdbook-linkcheck2 mdbook-toc mdbook-mermaid +cargo install mdbook mdbook-linkcheck2 mdbook-mermaid ``` and execute the following command in the root of the repository: @@ -67,8 +67,8 @@ ENABLE_LINKCHECK=1 mdbook serve ### Table of Contents -We use `mdbook-toc` to auto-generate TOCs for long sections. You can invoke the preprocessor by -including the `` marker at the place where you want the TOC. +Each page has a TOC that is automatically generated by `pagetoc.js`. +There is an associated `pagetoc.css`, for styling. 
## Synchronizing josh subtree with rustc diff --git a/src/doc/rustc-dev-guide/book.toml b/src/doc/rustc-dev-guide/book.toml index b84b1e7548a86..daf237ed9081b 100644 --- a/src/doc/rustc-dev-guide/book.toml +++ b/src/doc/rustc-dev-guide/book.toml @@ -6,17 +6,18 @@ description = "A guide to developing the Rust compiler (rustc)" [build] create-missing = false -[preprocessor.toc] -command = "mdbook-toc" -renderer = ["html"] - [preprocessor.mermaid] command = "mdbook-mermaid" [output.html] git-repository-url = "https://github.com/rust-lang/rustc-dev-guide" edit-url-template = "https://github.com/rust-lang/rustc-dev-guide/edit/master/{path}" -additional-js = ["mermaid.min.js", "mermaid-init.js"] +additional-js = [ + "mermaid.min.js", + "mermaid-init.js", + "pagetoc.js", +] +additional-css = ["pagetoc.css"] [output.html.search] use-boolean-and = true diff --git a/src/doc/rustc-dev-guide/pagetoc.css b/src/doc/rustc-dev-guide/pagetoc.css new file mode 100644 index 0000000000000..fa709194f3755 --- /dev/null +++ b/src/doc/rustc-dev-guide/pagetoc.css @@ -0,0 +1,84 @@ +/* Inspired by https://github.com/JorelAli/mdBook-pagetoc/tree/98ee241 (under WTFPL) */ + +:root { + --toc-width: 270px; + --center-content-toc-shift: calc(-1 * var(--toc-width) / 2); +} + +.nav-chapters { + /* adjust width of buttons that bring to the previous or the next page */ + min-width: 50px; +} + +@media only screen { + @media (max-width: 1179px) { + .sidebar-hidden #sidetoc { + display: none; + } + } + + @media (max-width: 1439px) { + .sidebar-visible #sidetoc { + display: none; + } + } + + @media (1180px <= width <= 1439px) { + .sidebar-hidden main { + position: relative; + left: var(--center-content-toc-shift); + } + } + + @media (1440px <= width <= 1700px) { + .sidebar-visible main { + position: relative; + left: var(--center-content-toc-shift); + } + } + + #sidetoc { + margin-left: calc(100% + 20px); + } + #pagetoc { + position: fixed; + /* adjust TOC width */ + width: var(--toc-width); + height: calc(100vh - var(--menu-bar-height) - 0.67em * 4); + overflow: auto; + } + #pagetoc a { + border-left: 1px solid var(--sidebar-bg); + color: var(--fg); + display: block; + padding-bottom: 5px; + padding-top: 5px; + padding-left: 10px; + text-align: left; + text-decoration: none; + } + #pagetoc a:hover, + #pagetoc a.active { + background: var(--sidebar-bg); + color: var(--sidebar-active) !important; + } + #pagetoc .active { + background: var(--sidebar-bg); + color: var(--sidebar-active); + } + #pagetoc .pagetoc-H2 { + padding-left: 20px; + } + #pagetoc .pagetoc-H3 { + padding-left: 40px; + } + #pagetoc .pagetoc-H4 { + padding-left: 60px; + } +} + +@media print { + #sidetoc { + display: none; + } +} diff --git a/src/doc/rustc-dev-guide/pagetoc.js b/src/doc/rustc-dev-guide/pagetoc.js new file mode 100644 index 0000000000000..927a5b10749b5 --- /dev/null +++ b/src/doc/rustc-dev-guide/pagetoc.js @@ -0,0 +1,104 @@ +// Inspired by https://github.com/JorelAli/mdBook-pagetoc/tree/98ee241 (under WTFPL) + +let activeHref = location.href; +function updatePageToc(elem = undefined) { + let selectedPageTocElem = elem; + const pagetoc = document.getElementById("pagetoc"); + + function getRect(element) { + return element.getBoundingClientRect(); + } + + function overflowTop(container, element) { + return getRect(container).top - getRect(element).top; + } + + function overflowBottom(container, element) { + return getRect(container).bottom - getRect(element).bottom; + } + + // We've not selected a heading to highlight, and the URL needs updating + // 
so we need to find a heading based on the URL + if (selectedPageTocElem === undefined && location.href !== activeHref) { + activeHref = location.href; + for (const pageTocElement of pagetoc.children) { + if (pageTocElement.href === activeHref) { + selectedPageTocElem = pageTocElement; + } + } + } + + // We still don't have a selected heading, let's try and find the most + // suitable heading based on the scroll position + if (selectedPageTocElem === undefined) { + const margin = window.innerHeight / 3; + + const headers = document.getElementsByClassName("header"); + for (let i = 0; i < headers.length; i++) { + const header = headers[i]; + if (selectedPageTocElem === undefined && getRect(header).top >= 0) { + if (getRect(header).top < margin) { + selectedPageTocElem = header; + } else { + selectedPageTocElem = headers[Math.max(0, i - 1)]; + } + } + // a very long last section's heading is over the screen + if (selectedPageTocElem === undefined && i === headers.length - 1) { + selectedPageTocElem = header; + } + } + } + + // Remove the active flag from all pagetoc elements + for (const pageTocElement of pagetoc.children) { + pageTocElement.classList.remove("active"); + } + + // If we have a selected heading, set it to active and scroll to it + if (selectedPageTocElem !== undefined) { + for (const pageTocElement of pagetoc.children) { + if (selectedPageTocElem.href.localeCompare(pageTocElement.href) === 0) { + pageTocElement.classList.add("active"); + if (overflowTop(pagetoc, pageTocElement) > 0) { + pagetoc.scrollTop = pageTocElement.offsetTop; + } + if (overflowBottom(pagetoc, pageTocElement) < 0) { + pagetoc.scrollTop -= overflowBottom(pagetoc, pageTocElement); + } + } + } + } +} + +if (document.getElementById("sidetoc") === null && + document.getElementsByClassName("header").length > 0) { + // The sidetoc element doesn't exist yet, let's create it + + // Create the empty sidetoc and pagetoc elements + const sidetoc = document.createElement("div"); + const pagetoc = document.createElement("div"); + sidetoc.id = "sidetoc"; + pagetoc.id = "pagetoc"; + sidetoc.appendChild(pagetoc); + + // And append them to the current DOM + const main = document.querySelector('main'); + main.insertBefore(sidetoc, main.firstChild); + + // Populate sidebar on load + window.addEventListener("load", () => { + for (const header of document.getElementsByClassName("header")) { + const link = document.createElement("a"); + link.innerHTML = header.innerHTML; + link.href = header.hash; + link.classList.add("pagetoc-" + header.parentElement.tagName); + document.getElementById("pagetoc").appendChild(link); + link.onclick = () => updatePageToc(link); + } + updatePageToc(); + }); + + // Update page table of contents selected heading on scroll + window.addEventListener("scroll", () => updatePageToc()); +} From 65589ad33f544003e49cabeb0b76f6b1f0d3ce30 Mon Sep 17 00:00:00 2001 From: Tshepang Mbambo Date: Mon, 28 Jul 2025 11:45:21 +0200 Subject: [PATCH 046/118] remove the markers --- src/doc/rustc-dev-guide/src/asm.md | 2 -- src/doc/rustc-dev-guide/src/backend/backend-agnostic.md | 2 -- src/doc/rustc-dev-guide/src/backend/implicit-caller-location.md | 2 -- src/doc/rustc-dev-guide/src/backend/monomorph.md | 2 -- src/doc/rustc-dev-guide/src/backend/updating-llvm.md | 2 -- .../src/borrow_check/moves_and_initialization/move_paths.md | 2 -- src/doc/rustc-dev-guide/src/borrow_check/region_inference.md | 2 -- .../src/borrow_check/region_inference/constraint_propagation.md | 2 -- 
.../src/borrow_check/region_inference/lifetime_parameters.md | 2 -- .../src/borrow_check/region_inference/member_constraints.md | 2 -- .../borrow_check/region_inference/placeholders_and_universes.md | 2 -- src/doc/rustc-dev-guide/src/bug-fix-procedure.md | 2 -- .../src/building/bootstrapping/what-bootstrapping-does.md | 2 -- src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md | 2 -- src/doc/rustc-dev-guide/src/building/new-target.md | 2 -- src/doc/rustc-dev-guide/src/building/optimized-build.md | 2 -- src/doc/rustc-dev-guide/src/building/suggested.md | 2 -- src/doc/rustc-dev-guide/src/compiler-debugging.md | 2 -- src/doc/rustc-dev-guide/src/compiler-src.md | 2 -- src/doc/rustc-dev-guide/src/const-eval/interpret.md | 2 -- src/doc/rustc-dev-guide/src/contributing.md | 2 -- src/doc/rustc-dev-guide/src/coroutine-closures.md | 2 -- src/doc/rustc-dev-guide/src/debugging-support-in-rustc.md | 2 -- src/doc/rustc-dev-guide/src/diagnostics.md | 2 -- src/doc/rustc-dev-guide/src/early_late_parameters.md | 2 -- src/doc/rustc-dev-guide/src/getting-started.md | 2 -- src/doc/rustc-dev-guide/src/git.md | 2 -- src/doc/rustc-dev-guide/src/guides/editions.md | 2 -- src/doc/rustc-dev-guide/src/hir.md | 2 -- src/doc/rustc-dev-guide/src/implementing_new_features.md | 2 -- src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md | 2 -- src/doc/rustc-dev-guide/src/macro-expansion.md | 2 -- src/doc/rustc-dev-guide/src/mir/construction.md | 2 -- src/doc/rustc-dev-guide/src/mir/dataflow.md | 2 -- src/doc/rustc-dev-guide/src/mir/drop-elaboration.md | 2 -- src/doc/rustc-dev-guide/src/mir/index.md | 2 -- src/doc/rustc-dev-guide/src/name-resolution.md | 2 -- src/doc/rustc-dev-guide/src/normalization.md | 2 -- src/doc/rustc-dev-guide/src/overview.md | 2 -- src/doc/rustc-dev-guide/src/panic-implementation.md | 2 -- src/doc/rustc-dev-guide/src/profile-guided-optimization.md | 2 -- .../src/queries/incremental-compilation-in-detail.md | 2 -- src/doc/rustc-dev-guide/src/queries/incremental-compilation.md | 2 -- .../src/queries/query-evaluation-model-in-detail.md | 2 -- src/doc/rustc-dev-guide/src/queries/salsa.md | 2 -- src/doc/rustc-dev-guide/src/query.md | 2 -- src/doc/rustc-dev-guide/src/rustdoc-internals.md | 2 -- src/doc/rustc-dev-guide/src/rustdoc-internals/search.md | 2 -- src/doc/rustc-dev-guide/src/rustdoc.md | 2 -- src/doc/rustc-dev-guide/src/stability.md | 2 -- src/doc/rustc-dev-guide/src/stabilization_guide.md | 2 -- src/doc/rustc-dev-guide/src/test-implementation.md | 2 -- src/doc/rustc-dev-guide/src/tests/adding.md | 2 -- src/doc/rustc-dev-guide/src/tests/compiletest.md | 2 -- src/doc/rustc-dev-guide/src/tests/directives.md | 2 -- src/doc/rustc-dev-guide/src/tests/intro.md | 2 -- src/doc/rustc-dev-guide/src/tests/running.md | 2 -- src/doc/rustc-dev-guide/src/tests/ui.md | 2 -- src/doc/rustc-dev-guide/src/thir.md | 2 -- src/doc/rustc-dev-guide/src/tracing.md | 2 -- src/doc/rustc-dev-guide/src/traits/goals-and-clauses.md | 2 -- src/doc/rustc-dev-guide/src/traits/lowering-to-logic.md | 2 -- src/doc/rustc-dev-guide/src/traits/resolution.md | 2 -- src/doc/rustc-dev-guide/src/ty.md | 2 -- src/doc/rustc-dev-guide/src/type-inference.md | 2 -- src/doc/rustc-dev-guide/src/typing_parameter_envs.md | 2 -- src/doc/rustc-dev-guide/src/variance.md | 2 -- src/doc/rustc-dev-guide/src/walkthrough.md | 2 -- 68 files changed, 136 deletions(-) diff --git a/src/doc/rustc-dev-guide/src/asm.md b/src/doc/rustc-dev-guide/src/asm.md index 1bb493e73d584..b5857d5465e15 100644 --- a/src/doc/rustc-dev-guide/src/asm.md +++ 
b/src/doc/rustc-dev-guide/src/asm.md @@ -1,7 +1,5 @@ # Inline assembly - - ## Overview Inline assembly in rustc mostly revolves around taking an `asm!` macro invocation and plumbing it diff --git a/src/doc/rustc-dev-guide/src/backend/backend-agnostic.md b/src/doc/rustc-dev-guide/src/backend/backend-agnostic.md index 0f81d3cb48a1d..2fdda4eda99a3 100644 --- a/src/doc/rustc-dev-guide/src/backend/backend-agnostic.md +++ b/src/doc/rustc-dev-guide/src/backend/backend-agnostic.md @@ -1,7 +1,5 @@ # Backend Agnostic Codegen - - [`rustc_codegen_ssa`] provides an abstract interface for all backends to implement, namely LLVM, [Cranelift], and [GCC]. diff --git a/src/doc/rustc-dev-guide/src/backend/implicit-caller-location.md b/src/doc/rustc-dev-guide/src/backend/implicit-caller-location.md index c5ee00813a344..9ca4bcab078e0 100644 --- a/src/doc/rustc-dev-guide/src/backend/implicit-caller-location.md +++ b/src/doc/rustc-dev-guide/src/backend/implicit-caller-location.md @@ -1,7 +1,5 @@ # Implicit caller location - - Approved in [RFC 2091], this feature enables the accurate reporting of caller location during panics initiated from functions like `Option::unwrap`, `Result::expect`, and `Index::index`. This feature adds the [`#[track_caller]`][attr-reference] attribute for functions, the diff --git a/src/doc/rustc-dev-guide/src/backend/monomorph.md b/src/doc/rustc-dev-guide/src/backend/monomorph.md index 7ebb4d2b1e81c..e9d98597ee0d9 100644 --- a/src/doc/rustc-dev-guide/src/backend/monomorph.md +++ b/src/doc/rustc-dev-guide/src/backend/monomorph.md @@ -1,7 +1,5 @@ # Monomorphization - - As you probably know, Rust has a very expressive type system that has extensive support for generic types. But of course, assembly is not generic, so we need to figure out the concrete types of all the generics before the code can diff --git a/src/doc/rustc-dev-guide/src/backend/updating-llvm.md b/src/doc/rustc-dev-guide/src/backend/updating-llvm.md index 18c822aad790e..ebef15d40baf5 100644 --- a/src/doc/rustc-dev-guide/src/backend/updating-llvm.md +++ b/src/doc/rustc-dev-guide/src/backend/updating-llvm.md @@ -1,7 +1,5 @@ # Updating LLVM - - Rust supports building against multiple LLVM versions: diff --git a/src/doc/rustc-dev-guide/src/borrow_check/moves_and_initialization/move_paths.md b/src/doc/rustc-dev-guide/src/borrow_check/moves_and_initialization/move_paths.md index ad9c75d629607..95518fbc0184f 100644 --- a/src/doc/rustc-dev-guide/src/borrow_check/moves_and_initialization/move_paths.md +++ b/src/doc/rustc-dev-guide/src/borrow_check/moves_and_initialization/move_paths.md @@ -1,7 +1,5 @@ # Move paths - - In reality, it's not enough to track initialization at the granularity of local variables. Rust also allows us to do moves and initialization at the field granularity: diff --git a/src/doc/rustc-dev-guide/src/borrow_check/region_inference.md b/src/doc/rustc-dev-guide/src/borrow_check/region_inference.md index 85e71b4fa4298..0d55ab955836e 100644 --- a/src/doc/rustc-dev-guide/src/borrow_check/region_inference.md +++ b/src/doc/rustc-dev-guide/src/borrow_check/region_inference.md @@ -1,7 +1,5 @@ # Region inference (NLL) - - The MIR-based region checking code is located in [the `rustc_mir::borrow_check` module][nll]. 
diff --git a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/constraint_propagation.md b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/constraint_propagation.md index 4c30d25e0406f..c3f8c03cb29f5 100644 --- a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/constraint_propagation.md +++ b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/constraint_propagation.md @@ -1,7 +1,5 @@ # Constraint propagation - - The main work of the region inference is **constraint propagation**, which is done in the [`propagate_constraints`] function. There are three sorts of constraints that are used in NLL, and we'll explain how diff --git a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/lifetime_parameters.md b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/lifetime_parameters.md index fadfac4045696..2d337dbc020f6 100644 --- a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/lifetime_parameters.md +++ b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/lifetime_parameters.md @@ -1,7 +1,5 @@ # Universal regions - - "Universal regions" is the name that the code uses to refer to "named lifetimes" -- e.g., lifetime parameters and `'static`. The name derives from the fact that such lifetimes are "universally quantified" diff --git a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/member_constraints.md b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/member_constraints.md index fd7c87ffcea7b..2804c97724f5a 100644 --- a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/member_constraints.md +++ b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/member_constraints.md @@ -1,7 +1,5 @@ # Member constraints - - A member constraint `'m member of ['c_1..'c_N]` expresses that the region `'m` must be *equal* to some **choice regions** `'c_i` (for some `i`). These constraints cannot be expressed by users, but they diff --git a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/placeholders_and_universes.md b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/placeholders_and_universes.md index 91c8c45261191..11fd2a5fc7db8 100644 --- a/src/doc/rustc-dev-guide/src/borrow_check/region_inference/placeholders_and_universes.md +++ b/src/doc/rustc-dev-guide/src/borrow_check/region_inference/placeholders_and_universes.md @@ -1,7 +1,5 @@ # Placeholders and universes - - From time to time we have to reason about regions that we can't concretely know. For example, consider this program: diff --git a/src/doc/rustc-dev-guide/src/bug-fix-procedure.md b/src/doc/rustc-dev-guide/src/bug-fix-procedure.md index 55436261fdefa..6b13c97023f54 100644 --- a/src/doc/rustc-dev-guide/src/bug-fix-procedure.md +++ b/src/doc/rustc-dev-guide/src/bug-fix-procedure.md @@ -1,7 +1,5 @@ # Procedures for breaking changes - - This page defines the best practices procedure for making bug fixes or soundness corrections in the compiler that can cause existing code to stop compiling. This text is based on diff --git a/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md b/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md index 2793ad4381526..da425d8d39bba 100644 --- a/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md +++ b/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md @@ -1,7 +1,5 @@ # What Bootstrapping does - - [*Bootstrapping*][boot] is the process of using a compiler to compile itself. 
More accurately, it means using an older compiler to compile a newer version of the same compiler. diff --git a/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md b/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md index d29cd14481025..b07d3533f59bc 100644 --- a/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md +++ b/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md @@ -1,7 +1,5 @@ # How to build and run the compiler - -
For `profile = "library"` users, or users who use `download-rustc = true | "if-unchanged"`, please be advised that diff --git a/src/doc/rustc-dev-guide/src/building/new-target.md b/src/doc/rustc-dev-guide/src/building/new-target.md index e11a2cd8ee577..436aec8ee265b 100644 --- a/src/doc/rustc-dev-guide/src/building/new-target.md +++ b/src/doc/rustc-dev-guide/src/building/new-target.md @@ -6,8 +6,6 @@ relevant to your desired goal. See also the associated documentation in the [target tier policy]. - - [target tier policy]: https://doc.rust-lang.org/rustc/target-tier-policy.html#adding-a-new-target ## Specifying a new LLVM diff --git a/src/doc/rustc-dev-guide/src/building/optimized-build.md b/src/doc/rustc-dev-guide/src/building/optimized-build.md index 62dfaca89d24e..863ed9749fb7e 100644 --- a/src/doc/rustc-dev-guide/src/building/optimized-build.md +++ b/src/doc/rustc-dev-guide/src/building/optimized-build.md @@ -1,7 +1,5 @@ # Optimized build of the compiler - - There are multiple additional build configuration options and techniques that can be used to compile a build of `rustc` that is as optimized as possible (for example when building `rustc` for a Linux distribution). The status of these configuration options for various Rust targets is tracked [here]. diff --git a/src/doc/rustc-dev-guide/src/building/suggested.md b/src/doc/rustc-dev-guide/src/building/suggested.md index c046161e77f25..35c7e935b5688 100644 --- a/src/doc/rustc-dev-guide/src/building/suggested.md +++ b/src/doc/rustc-dev-guide/src/building/suggested.md @@ -3,8 +3,6 @@ The full bootstrapping process takes quite a while. Here are some suggestions to make your life easier. - - ## Installing a pre-push hook CI will automatically fail your build if it doesn't pass `tidy`, our internal diff --git a/src/doc/rustc-dev-guide/src/compiler-debugging.md b/src/doc/rustc-dev-guide/src/compiler-debugging.md index 102e20207792e..edd2aa6c5f64b 100644 --- a/src/doc/rustc-dev-guide/src/compiler-debugging.md +++ b/src/doc/rustc-dev-guide/src/compiler-debugging.md @@ -1,7 +1,5 @@ # Debugging the compiler - - This chapter contains a few tips to debug the compiler. These tips aim to be useful no matter what you are working on. Some of the other chapters have advice about specific parts of the compiler (e.g. the [Queries Debugging and diff --git a/src/doc/rustc-dev-guide/src/compiler-src.md b/src/doc/rustc-dev-guide/src/compiler-src.md index 00aa96226849d..d67bacb1b3395 100644 --- a/src/doc/rustc-dev-guide/src/compiler-src.md +++ b/src/doc/rustc-dev-guide/src/compiler-src.md @@ -1,7 +1,5 @@ # High-level overview of the compiler source - - Now that we have [seen what the compiler does][orgch], let's take a look at the structure of the [`rust-lang/rust`] repository, where the rustc source code lives. diff --git a/src/doc/rustc-dev-guide/src/const-eval/interpret.md b/src/doc/rustc-dev-guide/src/const-eval/interpret.md index 51a539de5cb6e..08382b12ff00a 100644 --- a/src/doc/rustc-dev-guide/src/const-eval/interpret.md +++ b/src/doc/rustc-dev-guide/src/const-eval/interpret.md @@ -1,7 +1,5 @@ # Interpreter - - The interpreter is a virtual machine for executing MIR without compiling to machine code. It is usually invoked via `tcx.const_eval_*` functions. 
The interpreter is shared between the compiler (for compile-time function diff --git a/src/doc/rustc-dev-guide/src/contributing.md b/src/doc/rustc-dev-guide/src/contributing.md index b3fcd79ec8184..963bef3af8de3 100644 --- a/src/doc/rustc-dev-guide/src/contributing.md +++ b/src/doc/rustc-dev-guide/src/contributing.md @@ -1,7 +1,5 @@ # Contribution procedures - - ## Bug reports While bugs are unfortunate, they're a reality in software. We can't fix what we diff --git a/src/doc/rustc-dev-guide/src/coroutine-closures.md b/src/doc/rustc-dev-guide/src/coroutine-closures.md index 48cdba44a9f52..2617c824a3911 100644 --- a/src/doc/rustc-dev-guide/src/coroutine-closures.md +++ b/src/doc/rustc-dev-guide/src/coroutine-closures.md @@ -1,7 +1,5 @@ # Async closures/"coroutine-closures" - - Please read [RFC 3668](https://rust-lang.github.io/rfcs/3668-async-closures.html) to understand the general motivation of the feature. This is a very technical and somewhat "vertical" chapter; ideally we'd split this and sprinkle it across all the relevant chapters, but for the purposes of understanding async closures *holistically*, I've put this together all here in one chapter. ## Coroutine-closures -- a technical deep dive diff --git a/src/doc/rustc-dev-guide/src/debugging-support-in-rustc.md b/src/doc/rustc-dev-guide/src/debugging-support-in-rustc.md index ac629934e0a42..bd4f795ce03b6 100644 --- a/src/doc/rustc-dev-guide/src/debugging-support-in-rustc.md +++ b/src/doc/rustc-dev-guide/src/debugging-support-in-rustc.md @@ -1,7 +1,5 @@ # Debugging support in the Rust compiler - - This document explains the state of debugging tools support in the Rust compiler (rustc). It gives an overview of GDB, LLDB, WinDbg/CDB, as well as infrastructure around Rust compiler to debug Rust code. diff --git a/src/doc/rustc-dev-guide/src/diagnostics.md b/src/doc/rustc-dev-guide/src/diagnostics.md index 33f5441d36e4f..82191e0a6eaf4 100644 --- a/src/doc/rustc-dev-guide/src/diagnostics.md +++ b/src/doc/rustc-dev-guide/src/diagnostics.md @@ -1,7 +1,5 @@ # Errors and lints - - A lot of effort has been put into making `rustc` have great error messages. This chapter is about how to emit compile errors and lints from the compiler. diff --git a/src/doc/rustc-dev-guide/src/early_late_parameters.md b/src/doc/rustc-dev-guide/src/early_late_parameters.md index 3f94b09056684..c472bdc2c4812 100644 --- a/src/doc/rustc-dev-guide/src/early_late_parameters.md +++ b/src/doc/rustc-dev-guide/src/early_late_parameters.md @@ -1,8 +1,6 @@ # Early vs Late bound parameters - - > **NOTE**: This chapter largely talks about early/late bound as being solely relevant when discussing function item types/function definitions. This is potentially not completely true, async blocks and closures should likely be discussed somewhat in this chapter. ## What does it mean to be "early" bound or "late" bound diff --git a/src/doc/rustc-dev-guide/src/getting-started.md b/src/doc/rustc-dev-guide/src/getting-started.md index d6c5c3ac8521b..04d2e37732fa9 100644 --- a/src/doc/rustc-dev-guide/src/getting-started.md +++ b/src/doc/rustc-dev-guide/src/getting-started.md @@ -3,8 +3,6 @@ Thank you for your interest in contributing to Rust! There are many ways to contribute, and we appreciate all of them. - - If this is your first time contributing, the [walkthrough] chapter can give you a good example of how a typical contribution would go. 
diff --git a/src/doc/rustc-dev-guide/src/git.md b/src/doc/rustc-dev-guide/src/git.md index 8726ddfce20cc..447c6fd454671 100644 --- a/src/doc/rustc-dev-guide/src/git.md +++ b/src/doc/rustc-dev-guide/src/git.md @@ -1,7 +1,5 @@ # Using Git - - The Rust project uses [Git] to manage its source code. In order to contribute, you'll need some familiarity with its features so that your changes can be incorporated into the compiler. diff --git a/src/doc/rustc-dev-guide/src/guides/editions.md b/src/doc/rustc-dev-guide/src/guides/editions.md index 9a92d4ebcb510..b65fbb13cd185 100644 --- a/src/doc/rustc-dev-guide/src/guides/editions.md +++ b/src/doc/rustc-dev-guide/src/guides/editions.md @@ -1,7 +1,5 @@ # Editions - - This chapter gives an overview of how Edition support works in rustc. This assumes that you are familiar with what Editions are (see the [Edition Guide]). diff --git a/src/doc/rustc-dev-guide/src/hir.md b/src/doc/rustc-dev-guide/src/hir.md index 72fb10701574b..38ba33112f2ec 100644 --- a/src/doc/rustc-dev-guide/src/hir.md +++ b/src/doc/rustc-dev-guide/src/hir.md @@ -1,7 +1,5 @@ # The HIR - - The HIR – "High-Level Intermediate Representation" – is the primary IR used in most of rustc. It is a compiler-friendly representation of the abstract syntax tree (AST) that is generated after parsing, macro expansion, and name diff --git a/src/doc/rustc-dev-guide/src/implementing_new_features.md b/src/doc/rustc-dev-guide/src/implementing_new_features.md index 76cf2386c826a..00bce8599e430 100644 --- a/src/doc/rustc-dev-guide/src/implementing_new_features.md +++ b/src/doc/rustc-dev-guide/src/implementing_new_features.md @@ -1,7 +1,5 @@ # Implementing new language features - - When you want to implement a new significant feature in the compiler, you need to go through this process to make sure everything goes smoothly. **NOTE: This section is for *language* features, not *library* features, which use [a different process].** diff --git a/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md b/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md index 880363b94bf2b..288b90f33c3d2 100644 --- a/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md +++ b/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md @@ -1,7 +1,5 @@ # LLVM source-based code coverage - - `rustc` supports detailed source-based code and test coverage analysis with a command line option (`-C instrument-coverage`) that instruments Rust libraries and binaries with additional instructions and data, at compile time. diff --git a/src/doc/rustc-dev-guide/src/macro-expansion.md b/src/doc/rustc-dev-guide/src/macro-expansion.md index a90f717004f0b..54d6d2b4e8138 100644 --- a/src/doc/rustc-dev-guide/src/macro-expansion.md +++ b/src/doc/rustc-dev-guide/src/macro-expansion.md @@ -1,7 +1,5 @@ # Macro expansion - - Rust has a very powerful macro system. In the previous chapter, we saw how the parser sets aside macros to be expanded (using temporary [placeholders]). 
This chapter is about the process of expanding those macros iteratively until diff --git a/src/doc/rustc-dev-guide/src/mir/construction.md b/src/doc/rustc-dev-guide/src/mir/construction.md index f2559a22b9555..8360d9ff1a8bc 100644 --- a/src/doc/rustc-dev-guide/src/mir/construction.md +++ b/src/doc/rustc-dev-guide/src/mir/construction.md @@ -1,7 +1,5 @@ # MIR construction - - The lowering of [HIR] to [MIR] occurs for the following (probably incomplete) list of items: diff --git a/src/doc/rustc-dev-guide/src/mir/dataflow.md b/src/doc/rustc-dev-guide/src/mir/dataflow.md index 85e57dd839b81..970e61196c122 100644 --- a/src/doc/rustc-dev-guide/src/mir/dataflow.md +++ b/src/doc/rustc-dev-guide/src/mir/dataflow.md @@ -1,7 +1,5 @@ # Dataflow Analysis - - If you work on the MIR, you will frequently come across various flavors of [dataflow analysis][wiki]. `rustc` uses dataflow to find uninitialized variables, determine what variables are live across a generator `yield` diff --git a/src/doc/rustc-dev-guide/src/mir/drop-elaboration.md b/src/doc/rustc-dev-guide/src/mir/drop-elaboration.md index 3b321fd44d1d3..4da612c83f0fc 100644 --- a/src/doc/rustc-dev-guide/src/mir/drop-elaboration.md +++ b/src/doc/rustc-dev-guide/src/mir/drop-elaboration.md @@ -1,7 +1,5 @@ # Drop elaboration - - ## Dynamic drops According to the [reference][reference-drop]: diff --git a/src/doc/rustc-dev-guide/src/mir/index.md b/src/doc/rustc-dev-guide/src/mir/index.md index f355875aa156e..8ba5f3ac8b784 100644 --- a/src/doc/rustc-dev-guide/src/mir/index.md +++ b/src/doc/rustc-dev-guide/src/mir/index.md @@ -1,7 +1,5 @@ # The MIR (Mid-level IR) - - MIR is Rust's _Mid-level Intermediate Representation_. It is constructed from [HIR](../hir.html). MIR was introduced in [RFC 1211]. It is a radically simplified form of Rust that is used for diff --git a/src/doc/rustc-dev-guide/src/name-resolution.md b/src/doc/rustc-dev-guide/src/name-resolution.md index 719ebce855366..2e96382f77970 100644 --- a/src/doc/rustc-dev-guide/src/name-resolution.md +++ b/src/doc/rustc-dev-guide/src/name-resolution.md @@ -1,7 +1,5 @@ # Name resolution - - In the previous chapters, we saw how the [*Abstract Syntax Tree* (`AST`)][ast] is built with all macros expanded. We saw how doing that requires doing some name resolution to resolve imports and macro names. In this chapter, we show diff --git a/src/doc/rustc-dev-guide/src/normalization.md b/src/doc/rustc-dev-guide/src/normalization.md index eb0962a412237..53e20f1c0db7f 100644 --- a/src/doc/rustc-dev-guide/src/normalization.md +++ b/src/doc/rustc-dev-guide/src/normalization.md @@ -1,7 +1,5 @@ # Aliases and Normalization - - ## Aliases In Rust there are a number of types that are considered equal to some "underlying" type, for example inherent associated types, trait associated types, free type aliases (`type Foo = u32`), and opaque types (`-> impl RPIT`). We consider such types to be "aliases", alias types are represented by the [`TyKind::Alias`][tykind_alias] variant, with the kind of alias tracked by the [`AliasTyKind`][aliaskind] enum. diff --git a/src/doc/rustc-dev-guide/src/overview.md b/src/doc/rustc-dev-guide/src/overview.md index 8a1a22fad660a..12b76828b5c3b 100644 --- a/src/doc/rustc-dev-guide/src/overview.md +++ b/src/doc/rustc-dev-guide/src/overview.md @@ -1,7 +1,5 @@ # Overview of the compiler - - This chapter is about the overall process of compiling a program -- how everything fits together. 
diff --git a/src/doc/rustc-dev-guide/src/panic-implementation.md b/src/doc/rustc-dev-guide/src/panic-implementation.md index 468190ffccd50..dba3f2146d23d 100644 --- a/src/doc/rustc-dev-guide/src/panic-implementation.md +++ b/src/doc/rustc-dev-guide/src/panic-implementation.md @@ -1,7 +1,5 @@ # Panicking in Rust - - ## Step 1: Invocation of the `panic!` macro. There are actually two panic macros - one defined in `core`, and one defined in `std`. diff --git a/src/doc/rustc-dev-guide/src/profile-guided-optimization.md b/src/doc/rustc-dev-guide/src/profile-guided-optimization.md index 2fa8102104513..4e3dadd406ec0 100644 --- a/src/doc/rustc-dev-guide/src/profile-guided-optimization.md +++ b/src/doc/rustc-dev-guide/src/profile-guided-optimization.md @@ -1,7 +1,5 @@ # Profile-guided optimization - - `rustc` supports doing profile-guided optimization (PGO). This chapter describes what PGO is and how the support for it is implemented in `rustc`. diff --git a/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md b/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md index 18e0e25c53152..46e38832e64d2 100644 --- a/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md +++ b/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md @@ -1,7 +1,5 @@ # Incremental compilation in detail - - The incremental compilation scheme is, in essence, a surprisingly simple extension to the overall query system. It relies on the fact that: diff --git a/src/doc/rustc-dev-guide/src/queries/incremental-compilation.md b/src/doc/rustc-dev-guide/src/queries/incremental-compilation.md index 6e5b4e8cc4992..731ff3287d9fe 100644 --- a/src/doc/rustc-dev-guide/src/queries/incremental-compilation.md +++ b/src/doc/rustc-dev-guide/src/queries/incremental-compilation.md @@ -1,7 +1,5 @@ # Incremental compilation - - The incremental compilation scheme is, in essence, a surprisingly simple extension to the overall query system. We'll start by describing a slightly simplified variant of the real thing – the "basic algorithm" – diff --git a/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md b/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md index 444e20bc580e3..c1a4373f7dac6 100644 --- a/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md +++ b/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md @@ -1,7 +1,5 @@ # The Query Evaluation Model in detail - - This chapter provides a deeper dive into the abstract model queries are built on. It does not go into implementation details but tries to explain the underlying logic. The examples here, therefore, have been stripped down and diff --git a/src/doc/rustc-dev-guide/src/queries/salsa.md b/src/doc/rustc-dev-guide/src/queries/salsa.md index 1a7b7fa9a6832..dc7160edc22cd 100644 --- a/src/doc/rustc-dev-guide/src/queries/salsa.md +++ b/src/doc/rustc-dev-guide/src/queries/salsa.md @@ -1,7 +1,5 @@ # How Salsa works - - This chapter is based on the explanation given by Niko Matsakis in this [video](https://www.youtube.com/watch?v=_muY4HjSqVw) about [Salsa](https://github.com/salsa-rs/salsa). 
To find out more you may diff --git a/src/doc/rustc-dev-guide/src/query.md b/src/doc/rustc-dev-guide/src/query.md index 0ca1b360a7014..8377a7b2f31a7 100644 --- a/src/doc/rustc-dev-guide/src/query.md +++ b/src/doc/rustc-dev-guide/src/query.md @@ -1,7 +1,5 @@ # Queries: demand-driven compilation - - As described in [Overview of the compiler], the Rust compiler is still (as of July 2021) transitioning from a traditional "pass-based" setup to a "demand-driven" system. The compiler query diff --git a/src/doc/rustc-dev-guide/src/rustdoc-internals.md b/src/doc/rustc-dev-guide/src/rustdoc-internals.md index 0234d4a920ed8..4affbafe4777a 100644 --- a/src/doc/rustc-dev-guide/src/rustdoc-internals.md +++ b/src/doc/rustc-dev-guide/src/rustdoc-internals.md @@ -1,7 +1,5 @@ # Rustdoc Internals - - This page describes [`rustdoc`]'s passes and modes. For an overview of `rustdoc`, see the ["Rustdoc overview" chapter](./rustdoc.md). diff --git a/src/doc/rustc-dev-guide/src/rustdoc-internals/search.md b/src/doc/rustc-dev-guide/src/rustdoc-internals/search.md index 3506431118baf..beff0a94c1ecd 100644 --- a/src/doc/rustc-dev-guide/src/rustdoc-internals/search.md +++ b/src/doc/rustc-dev-guide/src/rustdoc-internals/search.md @@ -7,8 +7,6 @@ in the crates in the doc bundle, and the second reads it, turns it into some in-memory structures, and scans them linearly to search. - - ## Search index format `search.js` calls this Raw, because it turns it into diff --git a/src/doc/rustc-dev-guide/src/rustdoc.md b/src/doc/rustc-dev-guide/src/rustdoc.md index 52ae48c3735c0..9290fcd3b41cb 100644 --- a/src/doc/rustc-dev-guide/src/rustdoc.md +++ b/src/doc/rustc-dev-guide/src/rustdoc.md @@ -9,8 +9,6 @@ For more details about how rustdoc works, see the [Rustdoc internals]: ./rustdoc-internals.md - - `rustdoc` uses `rustc` internals (and, of course, the standard library), so you will have to build the compiler and `std` once before you can build `rustdoc`. diff --git a/src/doc/rustc-dev-guide/src/stability.md b/src/doc/rustc-dev-guide/src/stability.md index 230925252bac3..d0cee54adb6a0 100644 --- a/src/doc/rustc-dev-guide/src/stability.md +++ b/src/doc/rustc-dev-guide/src/stability.md @@ -6,8 +6,6 @@ APIs to use unstable APIs internally in the rustc standard library. **NOTE**: this section is for *library* features, not *language* features. For instructions on stabilizing a language feature see [Stabilizing Features](./stabilization_guide.md). - - ## unstable The `#[unstable(feature = "foo", issue = "1234", reason = "lorem ipsum")]` diff --git a/src/doc/rustc-dev-guide/src/stabilization_guide.md b/src/doc/rustc-dev-guide/src/stabilization_guide.md index f155272e5a2c6..e399930fc523d 100644 --- a/src/doc/rustc-dev-guide/src/stabilization_guide.md +++ b/src/doc/rustc-dev-guide/src/stabilization_guide.md @@ -6,8 +6,6 @@ Once an unstable feature has been well-tested with no outstanding concerns, anyone may push for its stabilization, though involving the people who have worked on it is prudent. Follow these steps: - - ## Write an RFC, if needed If the feature was part of a [lang experiment], the lang team generally will want to first accept an RFC before stabilization. 
diff --git a/src/doc/rustc-dev-guide/src/test-implementation.md b/src/doc/rustc-dev-guide/src/test-implementation.md index e906dd29f25f5..f09d73631998f 100644 --- a/src/doc/rustc-dev-guide/src/test-implementation.md +++ b/src/doc/rustc-dev-guide/src/test-implementation.md @@ -1,7 +1,5 @@ # The `#[test]` attribute - - Many Rust programmers rely on a built-in attribute called `#[test]`. All diff --git a/src/doc/rustc-dev-guide/src/tests/adding.md b/src/doc/rustc-dev-guide/src/tests/adding.md index 895eabfbd56a7..e5c26bef11d0c 100644 --- a/src/doc/rustc-dev-guide/src/tests/adding.md +++ b/src/doc/rustc-dev-guide/src/tests/adding.md @@ -1,7 +1,5 @@ # Adding new tests - - **In general, we expect every PR that fixes a bug in rustc to come accompanied by a regression test of some kind.** This test should fail in master but pass after the PR. These tests are really useful for preventing us from repeating the diff --git a/src/doc/rustc-dev-guide/src/tests/compiletest.md b/src/doc/rustc-dev-guide/src/tests/compiletest.md index a108dfdef9b3f..4980ed845d6dd 100644 --- a/src/doc/rustc-dev-guide/src/tests/compiletest.md +++ b/src/doc/rustc-dev-guide/src/tests/compiletest.md @@ -1,7 +1,5 @@ # Compiletest - - ## Introduction `compiletest` is the main test harness of the Rust test suite. It allows test diff --git a/src/doc/rustc-dev-guide/src/tests/directives.md b/src/doc/rustc-dev-guide/src/tests/directives.md index 5c3ae359ba0bf..a16be9b482553 100644 --- a/src/doc/rustc-dev-guide/src/tests/directives.md +++ b/src/doc/rustc-dev-guide/src/tests/directives.md @@ -1,7 +1,5 @@ # Compiletest directives - - diff --git a/src/doc/rustc-dev-guide/src/tests/intro.md b/src/doc/rustc-dev-guide/src/tests/intro.md index 79b96c450a8d8..b90c16d602c31 100644 --- a/src/doc/rustc-dev-guide/src/tests/intro.md +++ b/src/doc/rustc-dev-guide/src/tests/intro.md @@ -1,7 +1,5 @@ # Testing the compiler - - The Rust project runs a wide variety of different tests, orchestrated by the build system (`./x test`). This section gives a brief overview of the different testing tools. Subsequent chapters dive into [running tests](running.md) and diff --git a/src/doc/rustc-dev-guide/src/tests/running.md b/src/doc/rustc-dev-guide/src/tests/running.md index 6526fe9c2357a..f6e313062cda8 100644 --- a/src/doc/rustc-dev-guide/src/tests/running.md +++ b/src/doc/rustc-dev-guide/src/tests/running.md @@ -1,7 +1,5 @@ # Running tests - - You can run the entire test collection using `x`. But note that running the *entire* test collection is almost never what you want to do during local development because it takes a really long time. For local development, see the diff --git a/src/doc/rustc-dev-guide/src/tests/ui.md b/src/doc/rustc-dev-guide/src/tests/ui.md index 782f78d76148e..c1e67e1b755c5 100644 --- a/src/doc/rustc-dev-guide/src/tests/ui.md +++ b/src/doc/rustc-dev-guide/src/tests/ui.md @@ -1,7 +1,5 @@ # UI tests - - UI tests are a particular [test suite](compiletest.md#test-suites) of compiletest. diff --git a/src/doc/rustc-dev-guide/src/thir.md b/src/doc/rustc-dev-guide/src/thir.md index 73d09ad80bf96..3d3dafaef49ba 100644 --- a/src/doc/rustc-dev-guide/src/thir.md +++ b/src/doc/rustc-dev-guide/src/thir.md @@ -1,7 +1,5 @@ # The THIR - - The THIR ("Typed High-Level Intermediate Representation"), previously called HAIR for "High-Level Abstract IR", is another IR used by rustc that is generated after [type checking]. 
It is (as of January 2024) used for diff --git a/src/doc/rustc-dev-guide/src/tracing.md b/src/doc/rustc-dev-guide/src/tracing.md index 0cfdf306e92d1..5e5b81fc65b22 100644 --- a/src/doc/rustc-dev-guide/src/tracing.md +++ b/src/doc/rustc-dev-guide/src/tracing.md @@ -1,7 +1,5 @@ # Using tracing to debug the compiler - - The compiler has a lot of [`debug!`] (or `trace!`) calls, which print out logging information at many points. These are very useful to at least narrow down the location of a bug if not to find it entirely, or just to orient yourself as to why the diff --git a/src/doc/rustc-dev-guide/src/traits/goals-and-clauses.md b/src/doc/rustc-dev-guide/src/traits/goals-and-clauses.md index 40fd4581bf3e5..2884ca5a05a1f 100644 --- a/src/doc/rustc-dev-guide/src/traits/goals-and-clauses.md +++ b/src/doc/rustc-dev-guide/src/traits/goals-and-clauses.md @@ -1,7 +1,5 @@ # Goals and clauses - - In logic programming terms, a **goal** is something that you must prove and a **clause** is something that you know is true. As described in the [lowering to logic](./lowering-to-logic.html) diff --git a/src/doc/rustc-dev-guide/src/traits/lowering-to-logic.md b/src/doc/rustc-dev-guide/src/traits/lowering-to-logic.md index 1248d434610b9..cc8b3bf800cb9 100644 --- a/src/doc/rustc-dev-guide/src/traits/lowering-to-logic.md +++ b/src/doc/rustc-dev-guide/src/traits/lowering-to-logic.md @@ -1,7 +1,5 @@ # Lowering to logic - - The key observation here is that the Rust trait system is basically a kind of logic, and it can be mapped onto standard logical inference rules. We can then look for solutions to those inference rules in a diff --git a/src/doc/rustc-dev-guide/src/traits/resolution.md b/src/doc/rustc-dev-guide/src/traits/resolution.md index c62b0593694f1..ccb2b04268e85 100644 --- a/src/doc/rustc-dev-guide/src/traits/resolution.md +++ b/src/doc/rustc-dev-guide/src/traits/resolution.md @@ -1,7 +1,5 @@ # Trait resolution (old-style) - - This chapter describes the general process of _trait resolution_ and points out some non-obvious things. diff --git a/src/doc/rustc-dev-guide/src/ty.md b/src/doc/rustc-dev-guide/src/ty.md index 767ac3fdba21b..4055f475e9923 100644 --- a/src/doc/rustc-dev-guide/src/ty.md +++ b/src/doc/rustc-dev-guide/src/ty.md @@ -1,7 +1,5 @@ # The `ty` module: representing types - - The `ty` module defines how the Rust compiler represents types internally. It also defines the *typing context* (`tcx` or `TyCtxt`), which is the central data structure in the compiler. diff --git a/src/doc/rustc-dev-guide/src/type-inference.md b/src/doc/rustc-dev-guide/src/type-inference.md index 888eb2439c5bc..2243205f129b5 100644 --- a/src/doc/rustc-dev-guide/src/type-inference.md +++ b/src/doc/rustc-dev-guide/src/type-inference.md @@ -1,7 +1,5 @@ # Type inference - - Type inference is the process of automatic detection of the type of an expression. diff --git a/src/doc/rustc-dev-guide/src/typing_parameter_envs.md b/src/doc/rustc-dev-guide/src/typing_parameter_envs.md index e21bc5155da15..db15467a47a06 100644 --- a/src/doc/rustc-dev-guide/src/typing_parameter_envs.md +++ b/src/doc/rustc-dev-guide/src/typing_parameter_envs.md @@ -1,7 +1,5 @@ # Typing/Parameter Environments - - ## Typing Environments When interacting with the type system there are a few variables to consider that can affect the results of trait solving. The set of in-scope where clauses, and what phase of the compiler type system operations are being performed in (the [`ParamEnv`][penv] and [`TypingMode`][tmode] structs respectively). 
diff --git a/src/doc/rustc-dev-guide/src/variance.md b/src/doc/rustc-dev-guide/src/variance.md index ad4fa4adfddb9..7aa0140715517 100644 --- a/src/doc/rustc-dev-guide/src/variance.md +++ b/src/doc/rustc-dev-guide/src/variance.md @@ -1,7 +1,5 @@ # Variance of type and lifetime parameters - - For a more general background on variance, see the [background] appendix. [background]: ./appendix/background.html diff --git a/src/doc/rustc-dev-guide/src/walkthrough.md b/src/doc/rustc-dev-guide/src/walkthrough.md index 48b3f8bb15d3b..b4c3379347ed2 100644 --- a/src/doc/rustc-dev-guide/src/walkthrough.md +++ b/src/doc/rustc-dev-guide/src/walkthrough.md @@ -1,7 +1,5 @@ # Walkthrough: a typical contribution - - There are _a lot_ of ways to contribute to the Rust compiler, including fixing bugs, improving performance, helping design features, providing feedback on existing features, etc. This chapter does not claim to scratch the surface. From 8b214fbf17d4e6f8e3ba38a26cffd741ac53adb8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 29 Jul 2025 08:31:08 +0200 Subject: [PATCH 047/118] fix: Do not require all rename definitions to be renameable --- src/tools/rust-analyzer/crates/ide/src/rename.rs | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index a07c647c2cb83..6c1d142c3b058 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -35,13 +35,8 @@ pub(crate) fn prepare_rename( let syntax = source_file.syntax(); let res = find_definitions(&sema, syntax, position, &Name::new_symbol_root(sym::underscore))? - .map(|(frange, kind, def, _, _)| { - // ensure all ranges are valid - - if def.range_for_rename(&sema).is_none() { - bail!("No references found at position") - } - + .filter(|(_, _, def, _, _)| def.range_for_rename(&sema).is_some()) + .map(|(frange, kind, _, _, _)| { always!( frange.range.contains_inclusive(position.offset) && frange.file_id == position.file_id From 1532b37010624c7657d6c9179cd6f01413bf7b56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Tue, 29 Jul 2025 10:07:27 +0200 Subject: [PATCH 048/118] Use GH app for authenticating sync PRs --- src/tools/rust-analyzer/.github/workflows/rustc-pull.yml | 3 ++- src/tools/rust-analyzer/triagebot.toml | 3 --- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml index 2a842f3b3114d..37cf5f3726b25 100644 --- a/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml +++ b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml @@ -11,10 +11,11 @@ jobs: if: github.repository == 'rust-lang/rust-analyzer' uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main with: + github-app-id: ${{ vars.APP_CLIENT_ID }} zulip-stream-id: 185405 zulip-bot-email: "rust-analyzer-ci-bot@rust-lang.zulipchat.com" pr-base-branch: master branch-name: rustc-pull secrets: zulip-api-token: ${{ secrets.ZULIP_API_TOKEN }} - token: ${{ secrets.GITHUB_TOKEN }} + github-app-secret: ${{ secrets.APP_PRIVATE_KEY }} diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml index 27fdb672455bc..c9862495bc0c6 100644 --- a/src/tools/rust-analyzer/triagebot.toml +++ b/src/tools/rust-analyzer/triagebot.toml @@ -28,6 +28,3 @@ labels = ["has-merge-commits", "S-waiting-on-author"] # Prevents mentions in commits to avoid 
users being spammed [no-mentions] - -# Automatically close and reopen PRs made by bots to run CI on them -[bot-pull-requests] From bad05ff4ffa5a61dcc8787d40796f9533b33b750 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Tue, 29 Jul 2025 12:37:06 +0300 Subject: [PATCH 049/118] In generate_mut_trait_impl, don't add a tabstop if the client does not support snippets --- .../src/handlers/generate_mut_trait_impl.rs | 51 ++++++++++++++++++- 1 file changed, 49 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index 9c4bcdd403042..ae1ae24d1ec1c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -104,7 +104,14 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> format!("Generate `{trait_new}` impl from this `{trait_name}` trait"), target, |edit| { - edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}")); + edit.insert( + target.start(), + if ctx.config.snippet_cap.is_some() { + format!("$0{impl_def}\n\n{indent}") + } else { + format!("{impl_def}\n\n{indent}") + }, + ); }, ) } @@ -161,7 +168,10 @@ fn process_ret_type(ref_ty: &ast::RetType) -> Option { #[cfg(test)] mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; + use crate::{ + AssistConfig, + tests::{TEST_CONFIG, check_assist, check_assist_not_applicable, check_assist_with_config}, + }; use super::*; @@ -402,6 +412,43 @@ impl Index$0 for [T; 3] {} pub trait AsRef {} impl AsRef$0 for [T; 3] {} +"#, + ); + } + + #[test] + fn no_snippets() { + check_assist_with_config( + generate_mut_trait_impl, + AssistConfig { snippet_cap: None, ..TEST_CONFIG }, + r#" +//- minicore: index +pub enum Axis { X = 0, Y = 1, Z = 2 } + +impl core::ops::Index$0 for [T; 3] { + type Output = T; + + fn index(&self, index: Axis) -> &Self::Output { + &self[index as usize] + } +} +"#, + r#" +pub enum Axis { X = 0, Y = 1, Z = 2 } + +impl core::ops::IndexMut for [T; 3] { + fn index_mut(&mut self, index: Axis) -> &mut Self::Output { + &mut self[index as usize] + } +} + +impl core::ops::Index for [T; 3] { + type Output = T; + + fn index(&self, index: Axis) -> &Self::Output { + &self[index as usize] + } +} "#, ); } From ea1e24a989721a1d285d6de913be2be2b5101439 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Tue, 29 Jul 2025 14:40:14 +0300 Subject: [PATCH 050/118] When displaying a projection into a type parameter that has bounds as `impl Trait`, collect only the bounds of this projection It used to collect the bounds of them all. 
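
For illustration, a minimal standalone sketch of the affected scenario, adapted from the
`regression_20239` test added below. The `main` function and the use of std's
`Iterator`/`ToString` (rather than the test's minicore fixtures) are assumptions made for
this sketch only. When rust-analyzer renders the type of the closure parameter (the
projection `L::Item`) as `impl Trait`, it now builds that display from the bounds on that
projection alone, instead of collecting the bounds attached to every alias in the
predicate list (such as those on `R::Item`).

```rust
// Minimal sketch (assumed standalone): `L::Item` and `R::Item` are both
// projections carrying their own `ToString` bound.
fn check_tostr_eq<L, R>(left: L, right: R)
where
    L: Iterator,
    L::Item: ToString,
    R: Iterator,
    R::Item: ToString,
{
    // The closure parameter `s` has the projection type `L::Item` (resp. `R::Item`);
    // its inlay hint is rendered as `impl ToString`, derived only from the bounds
    // placed on that specific projection.
    let _ = left.map(|s| s.to_string());
    let _ = right.map(|s| s.to_string());
}

fn main() {
    // Hypothetical call, present only so the sketch compiles and runs.
    check_tostr_eq([1u32, 2].into_iter(), ["a", "b"].into_iter());
}
```

In both calls the hint reads `impl ToString`, matching the `// ^ impl ToString`
expectations in the new regression test.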
--- .../crates/hir-ty/src/display.rs | 26 ++++++++-------- .../crates/ide/src/inlay_hints.rs | 30 +++++++++++++++++++ 2 files changed, 43 insertions(+), 13 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index b3760e3a3822a..f0e31ebd020ca 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -620,19 +620,19 @@ impl HirDisplay for ProjectionTy { .generic_predicates(id.parent) .iter() .map(|pred| pred.clone().substitute(Interner, &substs)) - .filter(|wc| match wc.skip_binders() { - WhereClause::Implemented(tr) => { - matches!( - tr.self_type_parameter(Interner).kind(Interner), - TyKind::Alias(_) - ) - } - WhereClause::TypeOutlives(t) => { - matches!(t.ty.kind(Interner), TyKind::Alias(_)) - } - // We shouldn't be here if these exist - WhereClause::AliasEq(_) => false, - WhereClause::LifetimeOutlives(_) => false, + .filter(|wc| { + let ty = match wc.skip_binders() { + WhereClause::Implemented(tr) => tr.self_type_parameter(Interner), + WhereClause::TypeOutlives(t) => t.ty.clone(), + // We shouldn't be here if these exist + WhereClause::AliasEq(_) | WhereClause::LifetimeOutlives(_) => { + return false; + } + }; + let TyKind::Alias(AliasTy::Projection(proj)) = ty.kind(Interner) else { + return false; + }; + proj == self }) .collect::>(); if !bounds.is_empty() { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 19e5509681aad..671fddb436309 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -1065,4 +1065,34 @@ fn bar() { "#, ); } + + #[test] + fn regression_20239() { + check_with_config( + InlayHintsConfig { parameter_hints: true, type_hints: true, ..DISABLED_CONFIG }, + r#" +//- minicore: fn +trait Iterator { + type Item; + fn map B>(self, f: F); +} +trait ToString { + fn to_string(&self); +} + +fn check_tostr_eq(left: L, right: R) +where + L: Iterator, + L::Item: ToString, + R: Iterator, + R::Item: ToString, +{ + left.map(|s| s.to_string()); + // ^ impl ToString + right.map(|s| s.to_string()); + // ^ impl ToString +} + "#, + ); + } } From cca89bcb7d54b4117c12ac17928c77f59aba37c5 Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Tue, 29 Jul 2025 20:59:50 +0900 Subject: [PATCH 051/118] add `SyntaxFactory::record_expr` to hide clone_for_update --- .../convert_tuple_struct_to_named_struct.rs | 20 +++++++++++-------- .../src/ast/syntax_factory/constructors.rs | 18 +++++++++++++++++ 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index f4041f49419a7..b27ebcaa4edf2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -4,7 +4,9 @@ use ide_db::defs::{Definition, NameRefClass}; use std::ops::RangeInclusive; use syntax::{ SyntaxElement, SyntaxKind, SyntaxNode, T, TextSize, - ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, + ast::{ + self, AstNode, HasAttrs, HasGenericParams, HasVisibility, syntax_factory::SyntaxFactory, + }, match_ast, syntax_editor::{Element, Position, SyntaxEditor}, }; @@ 
-105,7 +107,8 @@ fn edit_struct_def( ); ast::RecordField::cast(field_editor.finish().new_root().clone()) }); - let record_fields = ast::make::record_field_list(record_fields).clone_for_update(); + let make = SyntaxFactory::without_mappings(); + let record_fields = make.record_field_list(record_fields); let tuple_fields_before = Position::before(tuple_fields.syntax()); if let Either::Left(strukt) = strukt { @@ -145,10 +148,11 @@ fn edit_struct_references( let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); let edit_node = |node: SyntaxNode| -> Option { + let make = SyntaxFactory::without_mappings(); match_ast! { match node { ast::TupleStructPat(tuple_struct_pat) => { - Some(ast::make::record_pat_with_fields( + Some(make.record_pat_with_fields( tuple_struct_pat.path()?, ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map( |(pat, name)| { @@ -158,7 +162,7 @@ fn edit_struct_references( ) }, ), None), - ).syntax().clone_for_update()) + ).syntax().clone()) }, // for tuple struct creations like Foo(42) ast::CallExpr(call_expr) => { @@ -174,9 +178,8 @@ fn edit_struct_references( } let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?; - Some( - ast::make::record_expr( + make.record_expr( path, ast::make::record_expr_field_list(arg_list.args().zip(names).map( |(expr, name)| { @@ -186,7 +189,7 @@ fn edit_struct_references( ) }, )), - ).syntax().clone_for_update() + ).syntax().clone() ) }, _ => return None, @@ -271,11 +274,12 @@ fn edit_field_references( } fn generate_names(fields: impl Iterator) -> Vec { + let make = SyntaxFactory::without_mappings(); fields .enumerate() .map(|(i, _)| { let idx = i + 1; - ast::make::name(&format!("field{idx}")).clone_for_update() + make.name(&format!("field{idx}")) }) .collect() } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs index 1ba6107315126..738a26fed5d82 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs @@ -939,6 +939,24 @@ impl SyntaxFactory { ast } + pub fn record_expr( + &self, + path: ast::Path, + fields: ast::RecordExprFieldList, + ) -> ast::RecordExpr { + let ast = make::record_expr(path.clone(), fields.clone()).clone_for_update(); + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone()); + builder.map_node( + fields.syntax().clone(), + ast.record_expr_field_list().unwrap().syntax().clone(), + ); + builder.finish(&mut mapping); + } + ast + } + pub fn record_expr_field( &self, name: ast::NameRef, From 08c6768190176719c3d85b177579a96a8e7d01c5 Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Tue, 29 Jul 2025 23:09:59 +0900 Subject: [PATCH 052/118] replace `make::` to `SyntaxFactory::` in `inline_type_alias` --- .../src/handlers/inline_type_alias.rs | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs index 62535531435d8..ae8d130df23ca 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs 
@@ -9,10 +9,11 @@ use ide_db::{ search::FileReference, }; use itertools::Itertools; +use syntax::ast::syntax_factory::SyntaxFactory; use syntax::syntax_editor::SyntaxEditor; use syntax::{ AstNode, NodeOrToken, SyntaxNode, - ast::{self, HasGenericParams, HasName, make}, + ast::{self, HasGenericParams, HasName}, }; use crate::{ @@ -206,8 +207,8 @@ impl LifetimeMap { alias_generics: &ast::GenericParamList, ) -> Option { let mut inner = FxHashMap::default(); - - let wildcard_lifetime = make::lifetime("'_"); + let make = SyntaxFactory::without_mappings(); + let wildcard_lifetime = make.lifetime("'_"); let lifetimes = alias_generics .lifetime_params() .filter_map(|lp| lp.lifetime()) @@ -334,9 +335,10 @@ fn create_replacement( }; let new_string = replacement_syntax.to_string(); let new = if new_string == "_" { - make::wildcard_pat().syntax().clone_for_update() + let make = SyntaxFactory::without_mappings(); + make.wildcard_pat().syntax().clone() } else { - replacement_syntax.clone_for_update() + replacement_syntax.clone() }; replacements.push((syntax.clone(), new)); @@ -385,12 +387,15 @@ impl ConstOrTypeGeneric { } fn replacement_value(&self) -> Option { - Some(match self { - ConstOrTypeGeneric::ConstArg(ca) => ca.expr()?.syntax().clone(), - ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(), - ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(), - ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(), - }) + Some( + match self { + ConstOrTypeGeneric::ConstArg(ca) => ca.expr()?.syntax().clone(), + ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(), + ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(), + ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(), + } + .clone_for_update(), + ) } } From 4f8d4ca1908e6179c4094d5843db6a29cb323d4d Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Wed, 30 Jul 2025 16:27:33 +0800 Subject: [PATCH 053/118] Update `codegen_{cranelift,gcc}` and `opt-dist` to use `build.compiletest-allow-stage0` --- build_system/src/test.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/build_system/src/test.rs b/build_system/src/test.rs index bc0fdd40b6e85..2c8271c36a94a 100644 --- a/build_system/src/test.rs +++ b/build_system/src/test.rs @@ -561,8 +561,6 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> { // FIXME: create a function "display_if_not_quiet" or something along the line. println!("[TEST] rustc asm test suite"); - env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string()); - let codegen_backend_path = format!( "{pwd}/target/{channel}/librustc_codegen_gcc.{dylib_ext}", pwd = std::env::current_dir() @@ -588,6 +586,8 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> { &"always", &"--stage", &"0", + &"--set", + &"build.compiletest-allow-stage0=true", &"tests/assembly-llvm/asm", &"--compiletest-rustc-args", &rustc_args, @@ -1047,7 +1047,6 @@ where // FIXME: create a function "display_if_not_quiet" or something along the line. 
println!("[TEST] rustc {test_type} test suite"); - env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string()); let extra = if args.is_using_gcc_master_branch() { "" } else { " -Csymbol-mangling-version=v0" }; @@ -1070,6 +1069,8 @@ where &"always", &"--stage", &"0", + &"--set", + &"build.compiletest-allow-stage0=true", &format!("tests/{test_type}"), &"--compiletest-rustc-args", &rustc_args, From 9fb99109b66626b51343ba6adee61274b0a67a50 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Wed, 30 Jul 2025 15:45:20 +0200 Subject: [PATCH 054/118] Regenerate intrinsics mapping --- src/intrinsic/archs.rs | 60 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/src/intrinsic/archs.rs b/src/intrinsic/archs.rs index 915ed875e32f4..d1b2a93243d27 100644 --- a/src/intrinsic/archs.rs +++ b/src/intrinsic/archs.rs @@ -95,8 +95,11 @@ fn map_arch_intrinsic(full_name: &str) -> &'static str { "cubema" => "__builtin_amdgcn_cubema", "cubesc" => "__builtin_amdgcn_cubesc", "cubetc" => "__builtin_amdgcn_cubetc", + "cvt.f16.bf8" => "__builtin_amdgcn_cvt_f16_bf8", + "cvt.f16.fp8" => "__builtin_amdgcn_cvt_f16_fp8", "cvt.f32.bf8" => "__builtin_amdgcn_cvt_f32_bf8", "cvt.f32.fp8" => "__builtin_amdgcn_cvt_f32_fp8", + "cvt.f32.fp8.e5m3" => "__builtin_amdgcn_cvt_f32_fp8_e5m3", "cvt.off.f32.i4" => "__builtin_amdgcn_cvt_off_f32_i4", "cvt.pk.bf8.f32" => "__builtin_amdgcn_cvt_pk_bf8_f32", "cvt.pk.f16.bf8" => "__builtin_amdgcn_cvt_pk_f16_bf8", @@ -181,6 +184,12 @@ fn map_arch_intrinsic(full_name: &str) -> &'static str { "dot4.f32.fp8.bf8" => "__builtin_amdgcn_dot4_f32_fp8_bf8", "dot4.f32.fp8.fp8" => "__builtin_amdgcn_dot4_f32_fp8_fp8", "ds.add.gs.reg.rtn" => "__builtin_amdgcn_ds_add_gs_reg_rtn", + "ds.atomic.async.barrier.arrive.b64" => { + "__builtin_amdgcn_ds_atomic_async_barrier_arrive_b64" + } + "ds.atomic.barrier.arrive.rtn.b64" => { + "__builtin_amdgcn_ds_atomic_barrier_arrive_rtn_b64" + } "ds.bpermute" => "__builtin_amdgcn_ds_bpermute", "ds.bpermute.fi.b32" => "__builtin_amdgcn_ds_bpermute_fi_b32", "ds.gws.barrier" => "__builtin_amdgcn_ds_gws_barrier", @@ -198,8 +207,32 @@ fn map_arch_intrinsic(full_name: &str) -> &'static str { "fdot2.f16.f16" => "__builtin_amdgcn_fdot2_f16_f16", "fdot2.f32.bf16" => "__builtin_amdgcn_fdot2_f32_bf16", "fdot2c.f32.bf16" => "__builtin_amdgcn_fdot2c_f32_bf16", + "flat.prefetch" => "__builtin_amdgcn_flat_prefetch", "fmul.legacy" => "__builtin_amdgcn_fmul_legacy", + "global.load.async.to.lds.b128" => { + "__builtin_amdgcn_global_load_async_to_lds_b128" + } + "global.load.async.to.lds.b32" => { + "__builtin_amdgcn_global_load_async_to_lds_b32" + } + "global.load.async.to.lds.b64" => { + "__builtin_amdgcn_global_load_async_to_lds_b64" + } + "global.load.async.to.lds.b8" => "__builtin_amdgcn_global_load_async_to_lds_b8", "global.load.lds" => "__builtin_amdgcn_global_load_lds", + "global.prefetch" => "__builtin_amdgcn_global_prefetch", + "global.store.async.from.lds.b128" => { + "__builtin_amdgcn_global_store_async_from_lds_b128" + } + "global.store.async.from.lds.b32" => { + "__builtin_amdgcn_global_store_async_from_lds_b32" + } + "global.store.async.from.lds.b64" => { + "__builtin_amdgcn_global_store_async_from_lds_b64" + } + "global.store.async.from.lds.b8" => { + "__builtin_amdgcn_global_store_async_from_lds_b8" + } "groupstaticsize" => "__builtin_amdgcn_groupstaticsize", "iglp.opt" => "__builtin_amdgcn_iglp_opt", "implicit.buffer.ptr" => "__builtin_amdgcn_implicit_buffer_ptr", @@ -291,6 +324,7 @@ fn map_arch_intrinsic(full_name: &str) -> 
&'static str { "s.incperflevel" => "__builtin_amdgcn_s_incperflevel", "s.memrealtime" => "__builtin_amdgcn_s_memrealtime", "s.memtime" => "__builtin_amdgcn_s_memtime", + "s.monitor.sleep" => "__builtin_amdgcn_s_monitor_sleep", "s.sendmsg" => "__builtin_amdgcn_s_sendmsg", "s.sendmsghalt" => "__builtin_amdgcn_s_sendmsghalt", "s.setprio" => "__builtin_amdgcn_s_setprio", @@ -300,11 +334,15 @@ fn map_arch_intrinsic(full_name: &str) -> &'static str { "s.sleep.var" => "__builtin_amdgcn_s_sleep_var", "s.ttracedata" => "__builtin_amdgcn_s_ttracedata", "s.ttracedata.imm" => "__builtin_amdgcn_s_ttracedata_imm", + "s.wait.asynccnt" => "__builtin_amdgcn_s_wait_asynccnt", "s.wait.event.export.ready" => "__builtin_amdgcn_s_wait_event_export_ready", + "s.wait.tensorcnt" => "__builtin_amdgcn_s_wait_tensorcnt", "s.waitcnt" => "__builtin_amdgcn_s_waitcnt", "sad.hi.u8" => "__builtin_amdgcn_sad_hi_u8", "sad.u16" => "__builtin_amdgcn_sad_u16", "sad.u8" => "__builtin_amdgcn_sad_u8", + "sat.pk4.i4.i8" => "__builtin_amdgcn_sat_pk4_i4_i8", + "sat.pk4.u4.u8" => "__builtin_amdgcn_sat_pk4_u4_u8", "sched.barrier" => "__builtin_amdgcn_sched_barrier", "sched.group.barrier" => "__builtin_amdgcn_sched_group_barrier", "sdot2" => "__builtin_amdgcn_sdot2", @@ -346,8 +384,13 @@ fn map_arch_intrinsic(full_name: &str) -> &'static str { "smfmac.i32.16x16x64.i8" => "__builtin_amdgcn_smfmac_i32_16x16x64_i8", "smfmac.i32.32x32x32.i8" => "__builtin_amdgcn_smfmac_i32_32x32x32_i8", "smfmac.i32.32x32x64.i8" => "__builtin_amdgcn_smfmac_i32_32x32x64_i8", + "struct.ptr.buffer.load.lds" => "__builtin_amdgcn_struct_ptr_buffer_load_lds", "sudot4" => "__builtin_amdgcn_sudot4", "sudot8" => "__builtin_amdgcn_sudot8", + "tensor.load.to.lds" => "__builtin_amdgcn_tensor_load_to_lds", + "tensor.load.to.lds.d2" => "__builtin_amdgcn_tensor_load_to_lds_d2", + "tensor.store.from.lds" => "__builtin_amdgcn_tensor_store_from_lds", + "tensor.store.from.lds.d2" => "__builtin_amdgcn_tensor_store_from_lds_d2", "udot2" => "__builtin_amdgcn_udot2", "udot4" => "__builtin_amdgcn_udot4", "udot8" => "__builtin_amdgcn_udot8", @@ -6326,6 +6369,23 @@ fn map_arch_intrinsic(full_name: &str) -> &'static str { } s390(name, full_name) } + "spv" => { + #[allow(non_snake_case)] + fn spv(name: &str, full_name: &str) -> &'static str { + match name { + // spv + "num.subgroups" => "__builtin_spirv_num_subgroups", + "subgroup.id" => "__builtin_spirv_subgroup_id", + "subgroup.local.invocation.id" => { + "__builtin_spirv_subgroup_local_invocation_id" + } + "subgroup.max.size" => "__builtin_spirv_subgroup_max_size", + "subgroup.size" => "__builtin_spirv_subgroup_size", + _ => unimplemented!("***** unsupported LLVM intrinsic {full_name}"), + } + } + spv(name, full_name) + } "ve" => { #[allow(non_snake_case)] fn ve(name: &str, full_name: &str) -> &'static str { From 2918a2b5505299e0d4164a776c0bdc745bb69736 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 30 Jul 2025 15:31:18 +0200 Subject: [PATCH 055/118] Abtract away json protocol for proc-macro-srv --- src/tools/rust-analyzer/Cargo.lock | 32 ++++ .../crates/load-cargo/src/lib.rs | 2 +- .../proc-macro-api/src/legacy_protocol.rs | 172 ++++++++++++++++++ .../proc-macro-api/src/legacy_protocol/msg.rs | 25 +-- .../src/legacy_protocol/msg/flat.rs | 105 ++++++----- .../crates/proc-macro-api/src/lib.rs | 93 +++------- .../crates/proc-macro-api/src/process.rs | 113 +++++------- .../crates/proc-macro-srv-cli/Cargo.toml | 1 + .../crates/proc-macro-srv-cli/src/main.rs | 39 +++- .../proc-macro-srv-cli/src/main_loop.rs | 63 +++++-- 
.../crates/proc-macro-srv/src/lib.rs | 8 +- .../src/server_impl/token_id.rs | 53 +++--- .../crates/proc-macro-srv/src/tests/utils.rs | 14 +- .../rust-analyzer/crates/span/src/lib.rs | 12 -- 14 files changed, 472 insertions(+), 260 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index c19e8471647fe..0cbbb5dd6de77 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -23,6 +23,12 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" +[[package]] +name = "anstyle" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" + [[package]] name = "anyhow" version = "1.0.98" @@ -287,6 +293,31 @@ dependencies = [ "tracing", ] +[[package]] +name = "clap" +version = "4.5.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed87a9d530bb41a67537289bafcac159cb3ee28460e0a4571123d2a778a6a882" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64f4f3f3c77c94aff3c7e9aac9a2ca1974a5adf392a8bb751e827d6d127ab966" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" + [[package]] name = "countme" version = "3.0.1" @@ -1615,6 +1646,7 @@ dependencies = [ name = "proc-macro-srv-cli" version = "0.0.0" dependencies = [ + "clap", "proc-macro-api", "proc-macro-srv", "tt", diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 26ee698af0812..98f415a522cb8 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -533,7 +533,7 @@ impl ProcMacroExpander for Expander { current_dir, ) { Ok(Ok(subtree)) => Ok(subtree), - Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), + Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err)), Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs new file mode 100644 index 0000000000000..ee96b899fe57f --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol.rs @@ -0,0 +1,172 @@ +//! The initial proc-macro-srv protocol, soon to be deprecated. + +pub mod json; +pub mod msg; + +use std::{ + io::{BufRead, Write}, + sync::Arc, +}; + +use paths::AbsPath; +use span::Span; + +use crate::{ + ProcMacro, ProcMacroKind, ServerError, + legacy_protocol::{ + json::{read_json, write_json}, + msg::{ + ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, Message, Request, Response, + ServerConfig, SpanDataIndexMap, deserialize_span_data_index_map, + flat::serialize_span_data_index_map, + }, + }, + process::ProcMacroServerProcess, + version, +}; + +pub(crate) use crate::legacy_protocol::msg::SpanMode; + +/// Legacy span type, only defined here as it is still used by the proc-macro server. 
+/// While rust-analyzer doesn't use this anymore at all, RustRover relies on the legacy type for +/// proc-macro expansion. +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct SpanId(pub u32); + +impl std::fmt::Debug for SpanId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +pub(crate) fn version_check(srv: &ProcMacroServerProcess) -> Result { + let request = Request::ApiVersionCheck {}; + let response = send_task(srv, request)?; + + match response { + Response::ApiVersionCheck(version) => Ok(version), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + } +} + +/// Enable support for rust-analyzer span mode if the server supports it. +pub(crate) fn enable_rust_analyzer_spans( + srv: &ProcMacroServerProcess, +) -> Result { + let request = Request::SetConfig(ServerConfig { span_mode: SpanMode::RustAnalyzer }); + let response = send_task(srv, request)?; + + match response { + Response::SetConfig(ServerConfig { span_mode }) => Ok(span_mode), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + } +} + +/// Finds proc-macros in a given dynamic library. +pub(crate) fn find_proc_macros( + srv: &ProcMacroServerProcess, + dylib_path: &AbsPath, +) -> Result, String>, ServerError> { + let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() }; + + let response = send_task(srv, request)?; + + match response { + Response::ListMacros(it) => Ok(it), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + } +} + +pub(crate) fn expand( + proc_macro: &ProcMacro, + subtree: tt::SubtreeView<'_, Span>, + attr: Option>, + env: Vec<(String, String)>, + def_site: Span, + call_site: Span, + mixed_site: Span, + current_dir: String, +) -> Result>, String>, crate::ServerError> +{ + let version = proc_macro.process.version(); + let mut span_data_table = SpanDataIndexMap::default(); + let def_site = span_data_table.insert_full(def_site).0; + let call_site = span_data_table.insert_full(call_site).0; + let mixed_site = span_data_table.insert_full(mixed_site).0; + let task = ExpandMacro { + data: ExpandMacroData { + macro_body: FlatTree::new(subtree, version, &mut span_data_table), + macro_name: proc_macro.name.to_string(), + attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), + has_global_spans: ExpnGlobals { + serialize: version >= version::HAS_GLOBAL_SPANS, + def_site, + call_site, + mixed_site, + }, + span_data_table: if proc_macro.process.rust_analyzer_spans() { + serialize_span_data_index_map(&span_data_table) + } else { + Vec::new() + }, + }, + lib: proc_macro.dylib_path.to_path_buf().into(), + env, + current_dir: Some(current_dir), + }; + + let response = send_task(&proc_macro.process, Request::ExpandMacro(Box::new(task)))?; + + match response { + Response::ExpandMacro(it) => Ok(it + .map(|tree| { + let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table); + if proc_macro.needs_fixup_change() { + proc_macro.change_fixup_to_match_old_server(&mut expanded); + } + expanded + }) + .map_err(|msg| msg.0)), + Response::ExpandMacroExtended(it) => Ok(it + .map(|resp| { + let mut expanded = FlatTree::to_subtree_resolved( + resp.tree, + version, + &deserialize_span_data_index_map(&resp.span_data_table), + ); + if proc_macro.needs_fixup_change() { + proc_macro.change_fixup_to_match_old_server(&mut expanded); + } + expanded + }) + .map_err(|msg| msg.0)), + _ => Err(ServerError { message: "unexpected 
response".to_owned(), io: None }), + } +} + +/// Sends a request to the proc-macro server and waits for a response. +fn send_task(srv: &ProcMacroServerProcess, req: Request) -> Result { + if let Some(server_error) = srv.exited() { + return Err(server_error.clone()); + } + + srv.send_task(send_request, req) +} + +/// Sends a request to the server and reads the response. +fn send_request( + mut writer: &mut dyn Write, + mut reader: &mut dyn BufRead, + req: Request, + buf: &mut String, +) -> Result, ServerError> { + req.write(write_json, &mut writer).map_err(|err| ServerError { + message: "failed to write request".into(), + io: Some(Arc::new(err)), + })?; + let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError { + message: "failed to read response".into(), + io: Some(Arc::new(err)), + })?; + Ok(res) +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs index 165936269d35d..b795c45589564 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs @@ -1,5 +1,6 @@ //! Defines messages for cross-process message passing based on `ndjson` wire protocol pub(crate) mod flat; +pub use self::flat::*; use std::io::{self, BufRead, Write}; @@ -9,24 +10,6 @@ use serde_derive::{Deserialize, Serialize}; use crate::ProcMacroKind; -pub use self::flat::{ - FlatTree, SpanDataIndexMap, deserialize_span_data_index_map, serialize_span_data_index_map, -}; -pub use span::TokenId; - -// The versions of the server protocol -pub const NO_VERSION_CHECK_VERSION: u32 = 0; -pub const VERSION_CHECK_VERSION: u32 = 1; -pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; -pub const HAS_GLOBAL_SPANS: u32 = 3; -pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4; -/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field. -pub const EXTENDED_LEAF_DATA: u32 = 5; -pub const HASHED_AST_ID: u32 = 6; - -/// Current API version of the proc-macro protocol. -pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID; - /// Represents requests sent from the client to the proc-macro-srv. #[derive(Debug, Serialize, Deserialize)] pub enum Request { @@ -48,7 +31,7 @@ pub enum Request { } /// Defines the mode used for handling span data. -#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)] +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)] pub enum SpanMode { /// Default mode, where spans are identified by an ID. 
#[default] @@ -210,6 +193,8 @@ mod tests { TopSubtreeBuilder, }; + use crate::version; + use super::*; fn fixture_token_tree() -> TopSubtree { @@ -308,7 +293,7 @@ mod tests { #[test] fn test_proc_macro_rpc_works() { let tt = fixture_token_tree(); - for v in RUST_ANALYZER_SPAN_SUPPORT..=CURRENT_API_VERSION { + for v in version::RUST_ANALYZER_SPAN_SUPPORT..=version::CURRENT_API_VERSION { let mut span_data_table = Default::default(); let task = ExpandMacro { data: ExpandMacroData { diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs index 597ffa05d203e..fb3542d24f460 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs @@ -40,9 +40,12 @@ use std::collections::VecDeque; use intern::Symbol; use rustc_hash::FxHashMap; use serde_derive::{Deserialize, Serialize}; -use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange, TokenId}; +use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange}; -use crate::legacy_protocol::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}; +use crate::{ + legacy_protocol::SpanId, + version::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}, +}; pub type SpanDataIndexMap = indexmap::IndexSet>; @@ -62,7 +65,7 @@ pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec { } pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { - debug_assert!(map.len() % 5 == 0); + debug_assert!(map.len().is_multiple_of(5)); map.chunks_exact(5) .map(|span| { let &[file_id, ast_id, start, end, e] = span else { unreachable!() }; @@ -91,27 +94,27 @@ pub struct FlatTree { } struct SubtreeRepr { - open: TokenId, - close: TokenId, + open: SpanId, + close: SpanId, kind: tt::DelimiterKind, tt: [u32; 2], } struct LiteralRepr { - id: TokenId, + id: SpanId, text: u32, suffix: u32, kind: u16, } struct PunctRepr { - id: TokenId, + id: SpanId, char: char, spacing: tt::Spacing, } struct IdentRepr { - id: TokenId, + id: SpanId, text: u32, is_raw: bool, } @@ -122,7 +125,7 @@ impl FlatTree { version: u32, span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { - let mut w = Writer { + let mut w = Writer:: { string_table: FxHashMap::default(), work: VecDeque::new(), span_data_table, @@ -159,8 +162,11 @@ impl FlatTree { } } - pub fn new_raw(subtree: tt::SubtreeView<'_, TokenId>, version: u32) -> FlatTree { - let mut w = Writer { + pub fn new_raw>( + subtree: tt::SubtreeView<'_, T::Span>, + version: u32, + ) -> FlatTree { + let mut w = Writer:: { string_table: FxHashMap::default(), work: VecDeque::new(), span_data_table: &mut (), @@ -202,7 +208,7 @@ impl FlatTree { version: u32, span_data_table: &SpanDataIndexMap, ) -> tt::TopSubtree { - Reader { + Reader:: { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { read_vec(self.subtree, SubtreeRepr::read_with_close_span) } else { @@ -227,8 +233,11 @@ impl FlatTree { .read() } - pub fn to_subtree_unresolved(self, version: u32) -> tt::TopSubtree { - Reader { + pub fn to_subtree_unresolved>( + self, + version: u32, + ) -> tt::TopSubtree { + Reader:: { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { read_vec(self.subtree, SubtreeRepr::read_with_close_span) } else { @@ -283,7 +292,7 @@ impl SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, 
tt: [lo, len] } + SubtreeRepr { open: SpanId(open), close: SpanId(!0), kind, tt: [lo, len] } } fn write_with_close_span(self) -> [u32; 5] { let kind = match self.kind { @@ -302,7 +311,7 @@ impl SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] } + SubtreeRepr { open: SpanId(open), close: SpanId(close), kind, tt: [lo, len] } } } @@ -311,13 +320,13 @@ impl LiteralRepr { [self.id.0, self.text] } fn read([id, text]: [u32; 2]) -> LiteralRepr { - LiteralRepr { id: TokenId(id), text, kind: 0, suffix: !0 } + LiteralRepr { id: SpanId(id), text, kind: 0, suffix: !0 } } fn write_with_kind(self) -> [u32; 4] { [self.id.0, self.text, self.kind as u32, self.suffix] } fn read_with_kind([id, text, kind, suffix]: [u32; 4]) -> LiteralRepr { - LiteralRepr { id: TokenId(id), text, kind: kind as u16, suffix } + LiteralRepr { id: SpanId(id), text, kind: kind as u16, suffix } } } @@ -335,7 +344,7 @@ impl PunctRepr { 1 => tt::Spacing::Joint, other => panic!("bad spacing {other}"), }; - PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing } + PunctRepr { id: SpanId(id), char: char.try_into().unwrap(), spacing } } } @@ -344,44 +353,46 @@ impl IdentRepr { [self.id.0, self.text] } fn read(data: [u32; 2]) -> IdentRepr { - IdentRepr { id: TokenId(data[0]), text: data[1], is_raw: false } + IdentRepr { id: SpanId(data[0]), text: data[1], is_raw: false } } fn write_with_rawness(self) -> [u32; 3] { [self.id.0, self.text, self.is_raw as u32] } fn read_with_rawness([id, text, is_raw]: [u32; 3]) -> IdentRepr { - IdentRepr { id: TokenId(id), text, is_raw: is_raw == 1 } + IdentRepr { id: SpanId(id), text, is_raw: is_raw == 1 } } } -trait InternableSpan: Copy { +pub trait SpanTransformer { type Table; - fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId; - fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self; + type Span: Copy; + fn token_id_of(table: &mut Self::Table, s: Self::Span) -> SpanId; + fn span_for_token_id(table: &Self::Table, id: SpanId) -> Self::Span; } - -impl InternableSpan for TokenId { +impl SpanTransformer for SpanId { type Table = (); - fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId { + type Span = Self; + fn token_id_of((): &mut Self::Table, token_id: Self::Span) -> SpanId { token_id } - fn span_for_token_id((): &Self::Table, id: TokenId) -> Self { + fn span_for_token_id((): &Self::Table, id: SpanId) -> Self::Span { id } } -impl InternableSpan for Span { +impl SpanTransformer for Span { type Table = SpanDataIndexMap; - fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { - TokenId(table.insert_full(span).0 as u32) + type Span = Self; + fn token_id_of(table: &mut Self::Table, span: Self::Span) -> SpanId { + SpanId(table.insert_full(span).0 as u32) } - fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self { + fn span_for_token_id(table: &Self::Table, id: SpanId) -> Self::Span { *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0]) } } -struct Writer<'a, 'span, S: InternableSpan> { - work: VecDeque<(usize, tt::iter::TtIter<'a, S>)>, +struct Writer<'a, 'span, S: SpanTransformer> { + work: VecDeque<(usize, tt::iter::TtIter<'a, S::Span>)>, string_table: FxHashMap, u32>, span_data_table: &'span mut S::Table, version: u32, @@ -394,8 +405,8 @@ struct Writer<'a, 'span, S: InternableSpan> { text: Vec, } -impl<'a, S: InternableSpan> Writer<'a, '_, S> { - fn write(&mut self, root: tt::SubtreeView<'a, S>) { +impl<'a, T: 
SpanTransformer> Writer<'a, '_, T> { + fn write(&mut self, root: tt::SubtreeView<'a, T::Span>) { let subtree = root.top_subtree(); self.enqueue(subtree, root.iter()); while let Some((idx, subtree)) = self.work.pop_front() { @@ -403,11 +414,11 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> { } } - fn token_id_of(&mut self, span: S) -> TokenId { - S::token_id_of(self.span_data_table, span) + fn token_id_of(&mut self, span: T::Span) -> SpanId { + T::token_id_of(self.span_data_table, span) } - fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, S>) { + fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, T::Span>) { let mut first_tt = self.token_tree.len(); let n_tt = subtree.clone().count(); // FIXME: `count()` walks over the entire iterator. self.token_tree.resize(first_tt + n_tt, !0); @@ -478,7 +489,11 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> { } } - fn enqueue(&mut self, subtree: &'a tt::Subtree, contents: tt::iter::TtIter<'a, S>) -> u32 { + fn enqueue( + &mut self, + subtree: &'a tt::Subtree, + contents: tt::iter::TtIter<'a, T::Span>, + ) -> u32 { let idx = self.subtree.len(); let open = self.token_id_of(subtree.delimiter.open); let close = self.token_id_of(subtree.delimiter.close); @@ -507,7 +522,7 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> { } } -struct Reader<'span, S: InternableSpan> { +struct Reader<'span, S: SpanTransformer> { version: u32, subtree: Vec, literal: Vec, @@ -518,11 +533,11 @@ struct Reader<'span, S: InternableSpan> { span_data_table: &'span S::Table, } -impl Reader<'_, S> { - pub(crate) fn read(self) -> tt::TopSubtree { - let mut res: Vec, Vec>)>> = +impl Reader<'_, T> { + pub(crate) fn read(self) -> tt::TopSubtree { + let mut res: Vec, Vec>)>> = vec![None; self.subtree.len()]; - let read_span = |id| S::span_for_token_id(self.span_data_table, id); + let read_span = |id| T::span_for_token_id(self.span_data_table, id); for i in (0..self.subtree.len()).rev() { let repr = &self.subtree[i]; let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize]; diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 516c7418bde8f..97919b85b5130 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -5,24 +5,29 @@ //! is used to provide basic infrastructure for communication between two //! processes: Client (RA itself), Server (the external program) -pub mod legacy_protocol { - pub mod json; - pub mod msg; -} +pub mod legacy_protocol; mod process; use paths::{AbsPath, AbsPathBuf}; use span::{ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, Span}; use std::{fmt, io, sync::Arc, time::SystemTime}; -use crate::{ - legacy_protocol::msg::{ - ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, HAS_GLOBAL_SPANS, HASHED_AST_ID, - PanicMessage, RUST_ANALYZER_SPAN_SUPPORT, Request, Response, SpanDataIndexMap, - deserialize_span_data_index_map, flat::serialize_span_data_index_map, - }, - process::ProcMacroServerProcess, -}; +use crate::process::ProcMacroServerProcess; + +/// The versions of the server protocol +pub mod version { + pub const NO_VERSION_CHECK_VERSION: u32 = 0; + pub const VERSION_CHECK_VERSION: u32 = 1; + pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; + pub const HAS_GLOBAL_SPANS: u32 = 3; + pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4; + /// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field. 
+ pub const EXTENDED_LEAF_DATA: u32 = 5; + pub const HASHED_AST_ID: u32 = 6; + + /// Current API version of the proc-macro protocol. + pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID; +} /// Represents different kinds of procedural macros that can be expanded by the external server. #[derive(Copy, Clone, Eq, PartialEq, Debug, serde_derive::Serialize, serde_derive::Deserialize)] @@ -163,7 +168,7 @@ impl ProcMacro { fn needs_fixup_change(&self) -> bool { let version = self.process.version(); - (RUST_ANALYZER_SPAN_SUPPORT..HASHED_AST_ID).contains(&version) + (version::RUST_ANALYZER_SPAN_SUPPORT..version::HASHED_AST_ID).contains(&version) } /// On some server versions, the fixup ast id is different than ours. So change it to match. @@ -204,7 +209,7 @@ impl ProcMacro { call_site: Span, mixed_site: Span, current_dir: String, - ) -> Result, PanicMessage>, ServerError> { + ) -> Result, String>, ServerError> { let (mut subtree, mut attr) = (subtree, attr); let (mut subtree_changed, mut attr_changed); if self.needs_fixup_change() { @@ -219,57 +224,15 @@ impl ProcMacro { } } - let version = self.process.version(); - - let mut span_data_table = SpanDataIndexMap::default(); - let def_site = span_data_table.insert_full(def_site).0; - let call_site = span_data_table.insert_full(call_site).0; - let mixed_site = span_data_table.insert_full(mixed_site).0; - let task = ExpandMacro { - data: ExpandMacroData { - macro_body: FlatTree::new(subtree, version, &mut span_data_table), - macro_name: self.name.to_string(), - attributes: attr - .map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), - has_global_spans: ExpnGlobals { - serialize: version >= HAS_GLOBAL_SPANS, - def_site, - call_site, - mixed_site, - }, - span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { - serialize_span_data_index_map(&span_data_table) - } else { - Vec::new() - }, - }, - lib: self.dylib_path.to_path_buf().into(), + legacy_protocol::expand( + self, + subtree, + attr, env, - current_dir: Some(current_dir), - }; - - let response = self.process.send_task(Request::ExpandMacro(Box::new(task)))?; - - match response { - Response::ExpandMacro(it) => Ok(it.map(|tree| { - let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table); - if self.needs_fixup_change() { - self.change_fixup_to_match_old_server(&mut expanded); - } - expanded - })), - Response::ExpandMacroExtended(it) => Ok(it.map(|resp| { - let mut expanded = FlatTree::to_subtree_resolved( - resp.tree, - version, - &deserialize_span_data_index_map(&resp.span_data_table), - ); - if self.needs_fixup_change() { - self.change_fixup_to_match_old_server(&mut expanded); - } - expanded - })), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), - } + def_site, + call_site, + mixed_site, + current_dir, + ) } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index fcea75ef672a1..278d9cbcda468 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -12,13 +12,8 @@ use stdx::JodChild; use crate::{ ProcMacroKind, ServerError, - legacy_protocol::{ - json::{read_json, write_json}, - msg::{ - CURRENT_API_VERSION, Message, RUST_ANALYZER_SPAN_SUPPORT, Request, Response, - ServerConfig, SpanMode, - }, - }, + legacy_protocol::{self, SpanMode}, + version, }; /// Represents a process handling proc-macro communication. 
@@ -28,11 +23,16 @@ pub(crate) struct ProcMacroServerProcess { /// hence the lock on the state. state: Mutex, version: u32, - mode: SpanMode, + protocol: Protocol, /// Populated when the server exits. exited: OnceLock>, } +#[derive(Debug)] +enum Protocol { + LegacyJson { mode: SpanMode }, +} + /// Maintains the state of the proc-macro server process. #[derive(Debug)] struct ProcessSrvState { @@ -56,27 +56,26 @@ impl ProcMacroServerProcess { io::Result::Ok(ProcMacroServerProcess { state: Mutex::new(ProcessSrvState { process, stdin, stdout }), version: 0, - mode: SpanMode::Id, + protocol: Protocol::LegacyJson { mode: SpanMode::Id }, exited: OnceLock::new(), }) }; let mut srv = create_srv()?; tracing::info!("sending proc-macro server version check"); match srv.version_check() { - Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::other( - format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}). - This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain." - ), - )), + Ok(v) if v > version::CURRENT_API_VERSION => Err(io::Error::other( + format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({}). + This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain." + ,version::CURRENT_API_VERSION + ), + )), Ok(v) => { tracing::info!("Proc-macro server version: {v}"); srv.version = v; - if srv.version >= RUST_ANALYZER_SPAN_SUPPORT { - if let Ok(mode) = srv.enable_rust_analyzer_spans() { - srv.mode = mode; - } + if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT && let Ok(mode) = srv.enable_rust_analyzer_spans() { + srv.protocol = Protocol::LegacyJson { mode }; } - tracing::info!("Proc-macro server span mode: {:?}", srv.mode); + tracing::info!("Proc-macro server protocol: {:?}", srv.protocol); Ok(srv) } Err(e) => { @@ -98,25 +97,24 @@ impl ProcMacroServerProcess { self.version } + /// Enable support for rust-analyzer span mode if the server supports it. + pub(crate) fn rust_analyzer_spans(&self) -> bool { + match self.protocol { + Protocol::LegacyJson { mode } => mode == SpanMode::RustAnalyzer, + } + } + /// Checks the API version of the running proc-macro server. fn version_check(&self) -> Result { - let request = Request::ApiVersionCheck {}; - let response = self.send_task(request)?; - - match response { - Response::ApiVersionCheck(version) => Ok(version), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + match self.protocol { + Protocol::LegacyJson { .. } => legacy_protocol::version_check(self), } } /// Enable support for rust-analyzer span mode if the server supports it. fn enable_rust_analyzer_spans(&self) -> Result { - let request = Request::SetConfig(ServerConfig { span_mode: SpanMode::RustAnalyzer }); - let response = self.send_task(request)?; - - match response { - Response::SetConfig(ServerConfig { span_mode }) => Ok(span_mode), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + match self.protocol { + Protocol::LegacyJson { .. 
} => legacy_protocol::enable_rust_analyzer_spans(self), } } @@ -125,25 +123,24 @@ impl ProcMacroServerProcess { &self, dylib_path: &AbsPath, ) -> Result, String>, ServerError> { - let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() }; - - let response = self.send_task(request)?; - - match response { - Response::ListMacros(it) => Ok(it), - _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), + match self.protocol { + Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path), } } - /// Sends a request to the proc-macro server and waits for a response. - pub(crate) fn send_task(&self, req: Request) -> Result { - if let Some(server_error) = self.exited.get() { - return Err(server_error.0.clone()); - } - + pub(crate) fn send_task( + &self, + serialize_req: impl FnOnce( + &mut dyn Write, + &mut dyn BufRead, + Request, + &mut String, + ) -> Result, ServerError>, + req: Request, + ) -> Result { let state = &mut *self.state.lock().unwrap(); let mut buf = String::new(); - send_request(&mut state.stdin, &mut state.stdout, req, &mut buf) + serialize_req(&mut state.stdin, &mut state.stdout, req, &mut buf) .and_then(|res| { res.ok_or_else(|| { let message = "proc-macro server did not respond with data".to_owned(); @@ -162,10 +159,10 @@ impl ProcMacroServerProcess { Ok(None) | Err(_) => e, Ok(Some(status)) => { let mut msg = String::new(); - if !status.success() { - if let Some(stderr) = state.process.child.stderr.as_mut() { - _ = stderr.read_to_string(&mut msg); - } + if !status.success() + && let Some(stderr) = state.process.child.stderr.as_mut() + { + _ = stderr.read_to_string(&mut msg); } let server_error = ServerError { message: format!( @@ -242,21 +239,3 @@ fn mk_child<'a>( } cmd.spawn() } - -/// Sends a request to the server and reads the response. 
-fn send_request( - mut writer: &mut impl Write, - mut reader: &mut impl BufRead, - req: Request, - buf: &mut String, -) -> Result, ServerError> { - req.write(write_json, &mut writer).map_err(|err| ServerError { - message: "failed to write request".into(), - io: Some(Arc::new(err)), - })?; - let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError { - message: "failed to read response".into(), - io: Some(Arc::new(err)), - })?; - Ok(res) -} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index ab421021b8bfd..16ec3b0e2b2a8 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -14,6 +14,7 @@ publish = false proc-macro-srv.workspace = true proc-macro-api.workspace = true tt.workspace = true +clap = {version = "4.5.42", default-features = false, features = ["std"]} [features] sysroot-abi = ["proc-macro-srv/sysroot-abi"] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index c47ed053254bf..b6ebc562eac97 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -9,6 +9,7 @@ extern crate rustc_driver as _; #[cfg(any(feature = "sysroot-abi", rust_analyzer))] mod main_loop; +use clap::{Command, ValueEnum}; #[cfg(any(feature = "sysroot-abi", rust_analyzer))] use main_loop::run; @@ -23,12 +24,46 @@ fn main() -> std::io::Result<()> { ); std::process::exit(122); } + let matches = Command::new("proc-macro-srv") + .args(&[clap::Arg::new("format") + .long("format") + .action(clap::ArgAction::Set) + .default_value("json") + .value_parser(clap::builder::EnumValueParser::::new())]) + .get_matches(); + let &format = + matches.get_one::("format").expect("format value should always be present"); + run(format) +} + +#[derive(Copy, Clone)] +enum ProtocolFormat { + Json, + Postcard, +} - run() +impl ValueEnum for ProtocolFormat { + fn value_variants<'a>() -> &'a [Self] { + &[ProtocolFormat::Json] + } + + fn to_possible_value(&self) -> Option { + match self { + ProtocolFormat::Json => Some(clap::builder::PossibleValue::new("json")), + ProtocolFormat::Postcard => Some(clap::builder::PossibleValue::new("postcard")), + } + } + fn from_str(input: &str, _ignore_case: bool) -> Result { + match input { + "json" => Ok(ProtocolFormat::Json), + "postcard" => Ok(ProtocolFormat::Postcard), + _ => Err(format!("unknown protocol format: {input}")), + } + } } #[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))] -fn run() -> std::io::Result<()> { +fn run(_: ProtocolFormat) -> std::io::Result<()> { Err(std::io::Error::new( std::io::ErrorKind::Unsupported, "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function" diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index f54dff1f2d822..6bf58eef3bb99 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -1,16 +1,47 @@ //! The main loop of the proc-macro server. 
use std::io; -use proc_macro_api::legacy_protocol::{ - json::{read_json, write_json}, - msg::{ - self, CURRENT_API_VERSION, ExpandMacroData, ExpnGlobals, Message, SpanMode, TokenId, - deserialize_span_data_index_map, serialize_span_data_index_map, +use proc_macro_api::{ + legacy_protocol::{ + json::{read_json, write_json}, + msg::{ + self, ExpandMacroData, ExpnGlobals, Message, SpanMode, SpanTransformer, + deserialize_span_data_index_map, serialize_span_data_index_map, + }, }, + version::CURRENT_API_VERSION, }; -use proc_macro_srv::EnvSnapshot; +use proc_macro_srv::{EnvSnapshot, SpanId}; -pub(crate) fn run() -> io::Result<()> { +use crate::ProtocolFormat; + +struct SpanTrans; + +impl SpanTransformer for SpanTrans { + type Table = (); + type Span = SpanId; + fn token_id_of( + _: &mut Self::Table, + span: Self::Span, + ) -> proc_macro_api::legacy_protocol::SpanId { + proc_macro_api::legacy_protocol::SpanId(span.0 as u32) + } + fn span_for_token_id( + _: &Self::Table, + id: proc_macro_api::legacy_protocol::SpanId, + ) -> Self::Span { + SpanId(id.0 as u32) + } +} + +pub(crate) fn run(format: ProtocolFormat) -> io::Result<()> { + match format { + ProtocolFormat::Json => run_json(), + ProtocolFormat::Postcard => unimplemented!(), + } +} + +fn run_json() -> io::Result<()> { fn macro_kind_to_api(kind: proc_macro_srv::ProcMacroKind) -> proc_macro_api::ProcMacroKind { match kind { proc_macro_srv::ProcMacroKind::CustomDerive => { @@ -54,13 +85,14 @@ pub(crate) fn run() -> io::Result<()> { } = *task; match span_mode { SpanMode::Id => msg::Response::ExpandMacro({ - let def_site = TokenId(def_site as u32); - let call_site = TokenId(call_site as u32); - let mixed_site = TokenId(mixed_site as u32); + let def_site = SpanId(def_site as u32); + let call_site = SpanId(call_site as u32); + let mixed_site = SpanId(mixed_site as u32); - let macro_body = macro_body.to_subtree_unresolved(CURRENT_API_VERSION); - let attributes = - attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); + let macro_body = + macro_body.to_subtree_unresolved::(CURRENT_API_VERSION); + let attributes = attributes + .map(|it| it.to_subtree_unresolved::(CURRENT_API_VERSION)); srv.expand( lib, @@ -74,7 +106,10 @@ pub(crate) fn run() -> io::Result<()> { mixed_site, ) .map(|it| { - msg::FlatTree::new_raw(tt::SubtreeView::new(&it), CURRENT_API_VERSION) + msg::FlatTree::new_raw::( + tt::SubtreeView::new(&it), + CURRENT_API_VERSION, + ) }) .map_err(msg::PanicMessage) }), diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index 223c5a54b7034..0f7c83979d563 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -41,10 +41,12 @@ use std::{ }; use paths::{Utf8Path, Utf8PathBuf}; -use span::{Span, TokenId}; +use span::Span; use crate::server_impl::TokenStream; +pub use crate::server_impl::token_id::SpanId; + #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum ProcMacroKind { CustomDerive, @@ -159,8 +161,8 @@ pub trait ProcMacroSrvSpan: Copy + Send { fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; } -impl ProcMacroSrvSpan for TokenId { - type Server = server_impl::token_id::TokenIdServer; +impl ProcMacroSrvSpan for SpanId { + type Server = server_impl::token_id::SpanIdServer; fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { Self::Server { call_site, def_site, mixed_site } diff --git 
a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs index b493b325e830f..91e70ea243ae4 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs @@ -1,4 +1,4 @@ -//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span. +//! proc-macro server backend based on [`proc_macro_api::msg::SpanId`] as the backing span. //! This backend is rather inflexible, used by RustRover and older rust-analyzer versions. use std::ops::{Bound, Range}; @@ -7,25 +7,34 @@ use proc_macro::bridge::{self, server}; use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder}; -type Span = span::TokenId; +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct SpanId(pub u32); + +impl std::fmt::Debug for SpanId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +type Span = SpanId; type TokenStream = crate::server_impl::TokenStream; pub struct FreeFunctions; -pub struct TokenIdServer { +pub struct SpanIdServer { pub call_site: Span, pub def_site: Span, pub mixed_site: Span, } -impl server::Types for TokenIdServer { +impl server::Types for SpanIdServer { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; type Span = Span; type Symbol = Symbol; } -impl server::FreeFunctions for TokenIdServer { +impl server::FreeFunctions for SpanIdServer { fn injected_env_var(&mut self, _: &str) -> Option { None } @@ -41,7 +50,7 @@ impl server::FreeFunctions for TokenIdServer { fn emit_diagnostic(&mut self, _: bridge::Diagnostic) {} } -impl server::TokenStream for TokenIdServer { +impl server::TokenStream for SpanIdServer { fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { stream.is_empty() } @@ -102,12 +111,12 @@ impl server::TokenStream for TokenIdServer { &mut self, stream: Self::TokenStream, ) -> Vec> { - // Can't join with `TokenId`. + // Can't join with `SpanId`. 
stream.into_bridge(&mut |first, _second| first) } } -impl server::Span for TokenIdServer { +impl server::Span for SpanIdServer { fn debug(&mut self, span: Self::Span) -> String { format!("{:?}", span.0) } @@ -174,14 +183,14 @@ impl server::Span for TokenIdServer { } } -impl server::Symbol for TokenIdServer { +impl server::Symbol for SpanIdServer { fn normalize_and_validate_ident(&mut self, string: &str) -> Result { // FIXME: nfc-normalize and validate idents Ok(::intern_symbol(string)) } } -impl server::Server for TokenIdServer { +impl server::Server for SpanIdServer { fn globals(&mut self) -> bridge::ExpnGlobals { bridge::ExpnGlobals { def_site: self.def_site, @@ -201,8 +210,6 @@ impl server::Server for TokenIdServer { #[cfg(test)] mod tests { - use span::TokenId; - use super::*; #[test] @@ -211,18 +218,18 @@ mod tests { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym: Symbol::intern("struct"), - span: TokenId(0), + span: SpanId(0), is_raw: tt::IdentIsRaw::No, })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym: Symbol::intern("T"), - span: TokenId(0), + span: SpanId(0), is_raw: tt::IdentIsRaw::No, })), tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: TokenId(0), - close: TokenId(0), + open: SpanId(0), + close: SpanId(0), kind: tt::DelimiterKind::Brace, }, len: 0, @@ -238,8 +245,8 @@ mod tests { let subtree_paren_a = vec![ tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: TokenId(0), - close: TokenId(0), + open: SpanId(0), + close: SpanId(0), kind: tt::DelimiterKind::Parenthesis, }, len: 1, @@ -247,24 +254,24 @@ mod tests { tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { is_raw: tt::IdentIsRaw::No, sym: Symbol::intern("a"), - span: TokenId(0), + span: SpanId(0), })), ]; - let t1 = TokenStream::from_str("(a)", TokenId(0)).unwrap(); + let t1 = TokenStream::from_str("(a)", SpanId(0)).unwrap(); assert_eq!(t1.token_trees.len(), 2); assert!(t1.token_trees[0..2] == subtree_paren_a); - let t2 = TokenStream::from_str("(a);", TokenId(0)).unwrap(); + let t2 = TokenStream::from_str("(a);", SpanId(0)).unwrap(); assert_eq!(t2.token_trees.len(), 3); assert!(t2.token_trees[0..2] == subtree_paren_a); - let underscore = TokenStream::from_str("_", TokenId(0)).unwrap(); + let underscore = TokenStream::from_str("_", SpanId(0)).unwrap(); assert!( underscore.token_trees[0] == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym: Symbol::intern("_"), - span: TokenId(0), + span: SpanId(0), is_raw: tt::IdentIsRaw::No, })) ); diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 10af5662b5c05..7aa38576bb52d 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -1,14 +1,12 @@ //! 
utils used in proc-macro tests use expect_test::Expect; -use span::{ - EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TokenId, -}; +use span::{EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext}; use tt::TextRange; -use crate::{EnvSnapshot, ProcMacroSrv, dylib, proc_macro_test_dylib_path}; +use crate::{EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path}; -fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream { +fn parse_string(call_site: SpanId, src: &str) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree( syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src) .unwrap() @@ -59,9 +57,9 @@ fn assert_expand_impl( let path = proc_macro_test_dylib_path(); let expander = dylib::Expander::new(&path).unwrap(); - let def_site = TokenId(0); - let call_site = TokenId(1); - let mixed_site = TokenId(2); + let def_site = SpanId(0); + let call_site = SpanId(1); + let mixed_site = SpanId(2); let input_ts = parse_string(call_site, input).into_subtree(call_site); let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site)); let input_ts_string = format!("{input_ts:?}"); diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs index b81d08eed6d88..ae9e038459e50 100644 --- a/src/tools/rust-analyzer/crates/span/src/lib.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -203,15 +203,3 @@ pub struct HirFileId(pub salsa::Id); /// `println!("Hello, {}", world)`. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct MacroCallId(pub salsa::Id); - -/// Legacy span type, only defined here as it is still used by the proc-macro server. -/// While rust-analyzer doesn't use this anymore at all, RustRover relies on the legacy type for -/// proc-macro expansion. 
-#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct TokenId(pub u32); - -impl std::fmt::Debug for TokenId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} From d71e972414b8a1fe188a31ed96fecbd157c3ddcf Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Wed, 30 Jul 2025 00:03:53 +0900 Subject: [PATCH 056/118] add `SyntaxEditor::delete_all` to migrate utils.rs `add_trait_assoc_items_to_impl` --- .../crates/ide-assists/src/utils.rs | 57 ++++++++++--------- .../crates/syntax/src/syntax_editor.rs | 10 ++++ .../crates/syntax/src/syntax_editor/edits.rs | 26 +++++++++ 3 files changed, 67 insertions(+), 26 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 15c7a6a3fc266..c81a3a6f08d5a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -23,12 +23,11 @@ use syntax::{ ast::{ self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, edit::{AstNodeEdit, IndentLevel}, - edit_in_place::{AttrsOwnerEdit, Removable}, + edit_in_place::AttrsOwnerEdit, make, syntax_factory::SyntaxFactory, }, - syntax_editor::SyntaxEditor, - ted, + syntax_editor::{Removable, SyntaxEditor}, }; use crate::{ @@ -207,7 +206,7 @@ pub fn add_trait_assoc_items_to_impl( stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`"); } } - original_item.clone_for_update() + original_item } .reset_indent(); @@ -221,31 +220,37 @@ pub fn add_trait_assoc_items_to_impl( cloned_item.remove_attrs_and_docs(); cloned_item }) - .map(|item| { - match &item { - ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { - let body = AstNodeEdit::indent( - &make::block_expr( - None, - Some(match config.expr_fill_default { - ExprFillDefaultMode::Todo => make::ext::expr_todo(), - ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), - ExprFillDefaultMode::Default => make::ext::expr_todo(), - }), - ), - IndentLevel::single(), - ); - ted::replace(fn_.get_or_create_body().syntax(), body.syntax()); - } - ast::AssocItem::TypeAlias(type_alias) => { - if let Some(type_bound_list) = type_alias.type_bound_list() { - type_bound_list.remove() - } + .filter_map(|item| match item { + ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { + let fn_ = fn_.clone_subtree(); + let new_body = &make::block_expr( + None, + Some(match config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }), + ); + let new_body = AstNodeEdit::indent(new_body, IndentLevel::single()); + let mut fn_editor = SyntaxEditor::new(fn_.syntax().clone()); + fn_.replace_or_insert_body(&mut fn_editor, new_body); + let new_fn_ = fn_editor.finish().new_root().clone(); + ast::AssocItem::cast(new_fn_) + } + ast::AssocItem::TypeAlias(type_alias) => { + let type_alias = type_alias.clone_subtree(); + if let Some(type_bound_list) = type_alias.type_bound_list() { + let mut type_alias_editor = SyntaxEditor::new(type_alias.syntax().clone()); + type_bound_list.remove(&mut type_alias_editor); + let type_alias = type_alias_editor.finish().new_root().clone(); + ast::AssocItem::cast(type_alias) + } else { + Some(ast::AssocItem::TypeAlias(type_alias)) } - _ => {} } - AstNodeEdit::indent(&item, new_indent_level) + item => Some(item), }) + .map(|item| AstNodeEdit::indent(&item, 
new_indent_level)) .collect() } diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs index 5107754b18257..147b54c21a5f5 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs @@ -83,6 +83,16 @@ impl SyntaxEditor { self.changes.push(Change::Replace(element.syntax_element(), None)); } + pub fn delete_all(&mut self, range: RangeInclusive) { + if range.start() == range.end() { + self.delete(range.start()); + return; + } + + debug_assert!(is_ancestor_or_self_of_element(range.start(), &self.root)); + self.changes.push(Change::ReplaceAll(range, Vec::new())) + } + pub fn replace(&mut self, old: impl Element, new: impl Element) { let old = old.syntax_element(); debug_assert!(is_ancestor_or_self_of_element(&old, &self.root)); diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs index 840e76979792d..9090f7c9eb149 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs @@ -153,6 +153,23 @@ impl ast::VariantList { } } +impl ast::Fn { + pub fn replace_or_insert_body(&self, editor: &mut SyntaxEditor, body: ast::BlockExpr) { + if let Some(old_body) = self.body() { + editor.replace(old_body.syntax(), body.syntax()); + } else { + let single_space = make::tokens::single_space(); + let elements = vec![single_space.into(), body.syntax().clone().into()]; + + if let Some(semicolon) = self.semicolon_token() { + editor.replace_with_many(semicolon, elements); + } else { + editor.insert_all(Position::last_child_of(self.syntax()), elements); + } + } + } +} + fn normalize_ws_between_braces(editor: &mut SyntaxEditor, node: &SyntaxNode) -> Option<()> { let make = SyntaxFactory::without_mappings(); let l = node @@ -184,6 +201,15 @@ pub trait Removable: AstNode { fn remove(&self, editor: &mut SyntaxEditor); } +impl Removable for ast::TypeBoundList { + fn remove(&self, editor: &mut SyntaxEditor) { + match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) { + Some(colon) => editor.delete_all(colon..=self.syntax().clone().into()), + None => editor.delete(self.syntax()), + } + } +} + impl Removable for ast::Use { fn remove(&self, editor: &mut SyntaxEditor) { let make = SyntaxFactory::without_mappings(); From 897168169d3372056988ba5f37f2210855bc7c4a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 31 Jul 2025 09:26:05 +0200 Subject: [PATCH 057/118] Properly clean proc-macro-srv proc-macro temp dir --- src/tools/rust-analyzer/Cargo.lock | 84 +++++++++++++++++++ .../crates/proc-macro-srv-cli/Cargo.toml | 3 + .../crates/proc-macro-srv-cli/src/main.rs | 3 + .../proc-macro-srv-cli/src/main_loop.rs | 5 +- .../crates/proc-macro-srv/Cargo.toml | 2 + .../crates/proc-macro-srv/src/dylib.rs | 22 +++-- .../crates/proc-macro-srv/src/lib.rs | 22 +++-- .../crates/proc-macro-srv/src/tests/utils.rs | 2 +- 8 files changed, 125 insertions(+), 18 deletions(-) diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 0cbbb5dd6de77..53c2d044bbd27 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -50,6 +50,15 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = 
"atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + [[package]] name = "autocfg" version = "1.4.0" @@ -125,6 +134,12 @@ version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa" +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "camino" version = "1.1.10" @@ -318,6 +333,15 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" +[[package]] +name = "cobs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1" +dependencies = [ + "thiserror 2.0.12", +] + [[package]] name = "countme" version = "3.0.1" @@ -339,6 +363,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" + [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -596,6 +626,15 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.14.5" @@ -622,6 +661,20 @@ dependencies = [ "hashbrown 0.15.4", ] +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version", + "serde", + "spin", + "stable_deref_trait", +] + [[package]] name = "hermit-abi" version = "0.5.2" @@ -1592,6 +1645,17 @@ version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" +[[package]] +name = "postcard" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24" +dependencies = [ + "cobs", + "heapless", + "serde", +] + [[package]] name = "potential_utf" version = "0.1.2" @@ -1639,6 +1703,7 @@ dependencies = [ "ra-ap-rustc_lexer 0.122.0", "span", "syntax-bridge", + "temp-dir", "tt", ] @@ -1647,6 +1712,7 @@ name = "proc-macro-srv-cli" version = "0.0.0" dependencies = [ "clap", + "postcard", "proc-macro-api", "proc-macro-srv", "tt", @@ -2023,6 +2089,15 @@ dependencies = [ "smallvec", ] +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + [[package]] name = "ryu" version = "1.0.20" @@ -2240,6 +2315,15 @@ dependencies = [ "vfs", ] +[[package]] +name = "spin" +version = 
"0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index 16ec3b0e2b2a8..91e9e62b084b4 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -15,10 +15,13 @@ proc-macro-srv.workspace = true proc-macro-api.workspace = true tt.workspace = true clap = {version = "4.5.42", default-features = false, features = ["std"]} +postcard = { version = "1.1.3", optional = true } [features] +default = ["postcard"] sysroot-abi = ["proc-macro-srv/sysroot-abi"] in-rust-tree = ["proc-macro-srv/in-rust-tree", "sysroot-abi"] +postcard = ["dep:postcard"] [[bin]] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index b6ebc562eac97..97a622e453dd1 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -39,6 +39,7 @@ fn main() -> std::io::Result<()> { #[derive(Copy, Clone)] enum ProtocolFormat { Json, + #[cfg(feature = "postcard")] Postcard, } @@ -50,12 +51,14 @@ impl ValueEnum for ProtocolFormat { fn to_possible_value(&self) -> Option { match self { ProtocolFormat::Json => Some(clap::builder::PossibleValue::new("json")), + #[cfg(feature = "postcard")] ProtocolFormat::Postcard => Some(clap::builder::PossibleValue::new("postcard")), } } fn from_str(input: &str, _ignore_case: bool) -> Result { match input { "json" => Ok(ProtocolFormat::Json), + #[cfg(feature = "postcard")] "postcard" => Ok(ProtocolFormat::Postcard), _ => Err(format!("unknown protocol format: {input}")), } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index 6bf58eef3bb99..46be8a21004d9 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -37,6 +37,7 @@ impl SpanTransformer for SpanTrans { pub(crate) fn run(format: ProtocolFormat) -> io::Result<()> { match format { ProtocolFormat::Json => run_json(), + #[cfg(feature = "postcard")] ProtocolFormat::Postcard => unimplemented!(), } } @@ -96,7 +97,7 @@ fn run_json() -> io::Result<()> { srv.expand( lib, - env, + &env, current_dir, macro_name, macro_body, @@ -127,7 +128,7 @@ fn run_json() -> io::Result<()> { }); srv.expand( lib, - env, + &env, current_dir, macro_name, macro_body, diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 4034f244393bf..d037e715e703f 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -16,6 +16,7 @@ doctest = false object.workspace = true libloading.workspace = true memmap2.workspace = true +temp-dir.workspace = true tt.workspace = true syntax-bridge.workspace = true @@ -26,6 +27,7 @@ intern.workspace = true ra-ap-rustc_lexer.workspace = true + [target.'cfg(unix)'.dependencies] libc.workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs 
b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs index c49159df9916d..095e9fa2e98e0 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs @@ -4,6 +4,7 @@ mod version; use proc_macro::bridge; use std::{fmt, fs, io, time::SystemTime}; +use temp_dir::TempDir; use libloading::Library; use object::Object; @@ -141,13 +142,16 @@ pub(crate) struct Expander { } impl Expander { - pub(crate) fn new(lib: &Utf8Path) -> Result { + pub(crate) fn new( + temp_dir: &TempDir, + lib: &Utf8Path, + ) -> Result { // Some libraries for dynamic loading require canonicalized path even when it is // already absolute let lib = lib.canonicalize_utf8()?; let modified_time = fs::metadata(&lib).and_then(|it| it.modified())?; - let path = ensure_file_with_lock_free_access(&lib)?; + let path = ensure_file_with_lock_free_access(temp_dir, &lib)?; let library = ProcMacroLibrary::open(path.as_ref())?; Ok(Expander { inner: library, _remove_on_drop: RemoveFileOnDrop(path), modified_time }) @@ -221,7 +225,10 @@ impl Drop for RemoveFileOnDrop { /// Copy the dylib to temp directory to prevent locking in Windows #[cfg(windows)] -fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result { +fn ensure_file_with_lock_free_access( + temp_dir: &TempDir, + path: &Utf8Path, +) -> io::Result { use std::collections::hash_map::RandomState; use std::hash::{BuildHasher, Hasher}; @@ -229,9 +236,7 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result return Ok(path.to_path_buf()); } - let mut to = Utf8PathBuf::from_path_buf(std::env::temp_dir()).unwrap(); - to.push("rust-analyzer-proc-macros"); - _ = fs::create_dir(&to); + let mut to = Utf8Path::from_path(temp_dir.path()).unwrap().to_owned(); let file_name = path.file_stem().ok_or_else(|| { io::Error::new(io::ErrorKind::InvalidInput, format!("File path is invalid: {path}")) @@ -248,6 +253,9 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result } #[cfg(unix)] -fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result { +fn ensure_file_with_lock_free_access( + _temp_dir: &TempDir, + path: &Utf8Path, +) -> io::Result { Ok(path.to_owned()) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index 0f7c83979d563..29fe5aed2b1fd 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -42,6 +42,7 @@ use std::{ use paths::{Utf8Path, Utf8PathBuf}; use span::Span; +use temp_dir::TempDir; use crate::server_impl::TokenStream; @@ -59,11 +60,16 @@ pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION"); pub struct ProcMacroSrv<'env> { expanders: Mutex>>, env: &'env EnvSnapshot, + temp_dir: TempDir, } impl<'env> ProcMacroSrv<'env> { pub fn new(env: &'env EnvSnapshot) -> Self { - Self { expanders: Default::default(), env } + Self { + expanders: Default::default(), + env, + temp_dir: TempDir::with_prefix("proc-macro-srv").unwrap(), + } } } @@ -73,7 +79,7 @@ impl ProcMacroSrv<'_> { pub fn expand( &self, lib: impl AsRef, - env: Vec<(String, String)>, + env: &[(String, String)], current_dir: Option>, macro_name: String, macro_body: tt::TopSubtree, @@ -131,7 +137,7 @@ impl ProcMacroSrv<'_> { fn expander(&self, path: &Utf8Path) -> Result, String> { let expander = || { - let expander = dylib::Expander::new(path) + let expander = dylib::Expander::new(&self.temp_dir, path) .map_err(|err| format!("Cannot create 
expander for {path}: {err}",)); expander.map(Arc::new) }; @@ -203,7 +209,7 @@ impl Default for EnvSnapshot { static ENV_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); struct EnvChange<'snap> { - changed_vars: Vec, + changed_vars: Vec<&'snap str>, prev_working_dir: Option, snap: &'snap EnvSnapshot, _guard: std::sync::MutexGuard<'snap, ()>, @@ -212,7 +218,7 @@ struct EnvChange<'snap> { impl<'snap> EnvChange<'snap> { fn apply( snap: &'snap EnvSnapshot, - new_vars: Vec<(String, String)>, + new_vars: &'snap [(String, String)], current_dir: Option<&Path>, ) -> EnvChange<'snap> { let guard = ENV_LOCK.lock().unwrap_or_else(std::sync::PoisonError::into_inner); @@ -232,11 +238,11 @@ impl<'snap> EnvChange<'snap> { EnvChange { snap, changed_vars: new_vars - .into_iter() + .iter() .map(|(k, v)| { // SAFETY: We have acquired the environment lock - unsafe { env::set_var(&k, v) }; - k + unsafe { env::set_var(k, v) }; + &**k }) .collect(), prev_working_dir, diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 7aa38576bb52d..f5a76e30bbcba 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -55,7 +55,7 @@ fn assert_expand_impl( expect_spanned: Expect, ) { let path = proc_macro_test_dylib_path(); - let expander = dylib::Expander::new(&path).unwrap(); + let expander = dylib::Expander::new(&temp_dir::TempDir::new().unwrap(), &path).unwrap(); let def_site = SpanId(0); let call_site = SpanId(1); From 1975c98b73fd8ed79792557605f76ce053710dd8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 31 Jul 2025 09:53:26 +0200 Subject: [PATCH 058/118] Reorganize proc-macro-srv --- .../proc-macro-srv-cli/src/main_loop.rs | 10 +- .../crates/proc-macro-srv/src/dylib.rs | 203 +++++++++--------- .../src/{ => dylib}/proc_macros.rs | 23 +- .../crates/proc-macro-srv/src/lib.rs | 41 ++-- .../crates/proc-macro-srv/src/server_impl.rs | 2 +- 5 files changed, 131 insertions(+), 148 deletions(-) rename src/tools/rust-analyzer/crates/proc-macro-srv/src/{ => dylib}/proc_macros.rs (81%) diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index 46be8a21004d9..703bc965db25c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -24,13 +24,13 @@ impl SpanTransformer for SpanTrans { _: &mut Self::Table, span: Self::Span, ) -> proc_macro_api::legacy_protocol::SpanId { - proc_macro_api::legacy_protocol::SpanId(span.0 as u32) + proc_macro_api::legacy_protocol::SpanId(span.0) } fn span_for_token_id( _: &Self::Table, id: proc_macro_api::legacy_protocol::SpanId, ) -> Self::Span { - SpanId(id.0 as u32) + SpanId(id.0) } } @@ -99,7 +99,7 @@ fn run_json() -> io::Result<()> { lib, &env, current_dir, - macro_name, + ¯o_name, macro_body, attributes, def_site, @@ -112,6 +112,7 @@ fn run_json() -> io::Result<()> { CURRENT_API_VERSION, ) }) + .map_err(|e| e.into_string().unwrap_or_default()) .map_err(msg::PanicMessage) }), SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended({ @@ -130,7 +131,7 @@ fn run_json() -> io::Result<()> { lib, &env, current_dir, - macro_name, + ¯o_name, macro_body, attributes, def_site, @@ -151,6 +152,7 @@ fn run_json() -> io::Result<()> { tree, span_data_table, }) + .map_err(|e| e.into_string().unwrap_or_default()) 
.map_err(msg::PanicMessage) }), } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs index 095e9fa2e98e0..c8513a10675da 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs @@ -1,5 +1,6 @@ //! Handles dynamic library loading for proc macro +mod proc_macros; mod version; use proc_macro::bridge; @@ -10,57 +11,56 @@ use libloading::Library; use object::Object; use paths::{Utf8Path, Utf8PathBuf}; -use crate::{ProcMacroKind, ProcMacroSrvSpan, proc_macros::ProcMacros, server_impl::TopSubtree}; +use crate::{ + PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros, + server_impl::TopSubtree, +}; -/// Loads dynamic library in platform dependent manner. -/// -/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) -/// and [here](https://github.com/rust-lang/rust/issues/60593). -/// -/// Usage of RTLD_DEEPBIND -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) -/// -/// It seems that on Windows that behaviour is default, so we do nothing in that case. -/// -/// # Safety -/// -/// The caller is responsible for ensuring that the path is valid proc-macro library -#[cfg(windows)] -unsafe fn load_library(file: &Utf8Path) -> Result { - // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library - unsafe { Library::new(file) } +pub(crate) struct Expander { + inner: ProcMacroLibrary, + modified_time: SystemTime, } -/// Loads dynamic library in platform dependent manner. -/// -/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) -/// and [here](https://github.com/rust-lang/rust/issues/60593). -/// -/// Usage of RTLD_DEEPBIND -/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) -/// -/// It seems that on Windows that behaviour is default, so we do nothing in that case. -/// -/// # Safety -/// -/// The caller is responsible for ensuring that the path is valid proc-macro library -#[cfg(unix)] -unsafe fn load_library(file: &Utf8Path) -> Result { - // not defined by POSIX, different values on mips vs other targets - #[cfg(target_env = "gnu")] - use libc::RTLD_DEEPBIND; - use libloading::os::unix::Library as UnixLibrary; - // defined by POSIX - use libloading::os::unix::RTLD_NOW; +impl Expander { + pub(crate) fn new( + temp_dir: &TempDir, + lib: &Utf8Path, + ) -> Result { + // Some libraries for dynamic loading require canonicalized path even when it is + // already absolute + let lib = lib.canonicalize_utf8()?; + let modified_time = fs::metadata(&lib).and_then(|it| it.modified())?; - // MUSL and bionic don't have it.. 
- #[cfg(not(target_env = "gnu"))] - const RTLD_DEEPBIND: std::os::raw::c_int = 0x0; + let path = ensure_file_with_lock_free_access(temp_dir, &lib)?; + let library = ProcMacroLibrary::open(path.as_ref())?; - // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library - unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) } + Ok(Expander { inner: library, modified_time }) + } + + pub(crate) fn expand( + &self, + macro_name: &str, + macro_body: TopSubtree, + attributes: Option>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result, PanicMessage> + where + ::TokenStream: Default, + { + self.inner + .proc_macros + .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site) + } + + pub(crate) fn list_macros(&self) -> impl Iterator { + self.inner.proc_macros.list_macros() + } + + pub(crate) fn modified_time(&self) -> SystemTime { + self.modified_time + } } #[derive(Debug)] @@ -134,57 +134,6 @@ impl ProcMacroLibrary { } } -// Drop order matters as we can't remove the dylib before the library is unloaded -pub(crate) struct Expander { - inner: ProcMacroLibrary, - _remove_on_drop: RemoveFileOnDrop, - modified_time: SystemTime, -} - -impl Expander { - pub(crate) fn new( - temp_dir: &TempDir, - lib: &Utf8Path, - ) -> Result { - // Some libraries for dynamic loading require canonicalized path even when it is - // already absolute - let lib = lib.canonicalize_utf8()?; - let modified_time = fs::metadata(&lib).and_then(|it| it.modified())?; - - let path = ensure_file_with_lock_free_access(temp_dir, &lib)?; - let library = ProcMacroLibrary::open(path.as_ref())?; - - Ok(Expander { inner: library, _remove_on_drop: RemoveFileOnDrop(path), modified_time }) - } - - pub(crate) fn expand( - &self, - macro_name: &str, - macro_body: TopSubtree, - attributes: Option>, - def_site: S, - call_site: S, - mixed_site: S, - ) -> Result, String> - where - ::TokenStream: Default, - { - let result = self - .inner - .proc_macros - .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site); - result.map_err(|e| e.into_string().unwrap_or_default()) - } - - pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { - self.inner.proc_macros.list_macros() - } - - pub(crate) fn modified_time(&self) -> SystemTime { - self.modified_time - } -} - fn invalid_data_err(e: impl Into>) -> io::Error { io::Error::new(io::ErrorKind::InvalidData, e) } @@ -214,15 +163,6 @@ fn find_registrar_symbol(obj: &object::File<'_>) -> object::Result io::Result { Ok(path.to_owned()) } + +/// Loads dynamic library in platform dependent manner. +/// +/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) +/// and [here](https://github.com/rust-lang/rust/issues/60593). +/// +/// Usage of RTLD_DEEPBIND +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) +/// +/// It seems that on Windows that behaviour is default, so we do nothing in that case. +/// +/// # Safety +/// +/// The caller is responsible for ensuring that the path is valid proc-macro library +#[cfg(windows)] +unsafe fn load_library(file: &Utf8Path) -> Result { + // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library + unsafe { Library::new(file) } +} + +/// Loads dynamic library in platform dependent manner. 
+/// +/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) +/// and [here](https://github.com/rust-lang/rust/issues/60593). +/// +/// Usage of RTLD_DEEPBIND +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) +/// +/// It seems that on Windows that behaviour is default, so we do nothing in that case. +/// +/// # Safety +/// +/// The caller is responsible for ensuring that the path is valid proc-macro library +#[cfg(unix)] +unsafe fn load_library(file: &Utf8Path) -> Result { + // not defined by POSIX, different values on mips vs other targets + #[cfg(target_env = "gnu")] + use libc::RTLD_DEEPBIND; + use libloading::os::unix::Library as UnixLibrary; + // defined by POSIX + use libloading::os::unix::RTLD_NOW; + + // MUSL and bionic don't have it.. + #[cfg(not(target_env = "gnu"))] + const RTLD_DEEPBIND: std::os::raw::c_int = 0x0; + + // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library + unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) } +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/proc_macros.rs similarity index 81% rename from src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs rename to src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/proc_macros.rs index 18532706c4aaa..9b5721e370ace 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/proc_macros.rs @@ -75,20 +75,13 @@ impl ProcMacros { Err(bridge::PanicMessage::String(format!("proc-macro `{macro_name}` is missing")).into()) } - pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { - self.0 - .iter() - .map(|proc_macro| match proc_macro { - bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { - (trait_name.to_string(), ProcMacroKind::CustomDerive) - } - bridge::client::ProcMacro::Bang { name, .. } => { - (name.to_string(), ProcMacroKind::Bang) - } - bridge::client::ProcMacro::Attr { name, .. } => { - (name.to_string(), ProcMacroKind::Attr) - } - }) - .collect() + pub(crate) fn list_macros(&self) -> impl Iterator { + self.0.iter().map(|proc_macro| match *proc_macro { + bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { + (trait_name, ProcMacroKind::CustomDerive) + } + bridge::client::ProcMacro::Bang { name, .. } => (name, ProcMacroKind::Bang), + bridge::client::ProcMacro::Attr { name, .. 
} => (name, ProcMacroKind::Attr), + }) } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index 29fe5aed2b1fd..cb97882c58541 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -27,7 +27,6 @@ extern crate ra_ap_rustc_lexer as rustc_lexer; extern crate rustc_lexer; mod dylib; -mod proc_macros; mod server_impl; use std::{ @@ -81,16 +80,17 @@ impl ProcMacroSrv<'_> { lib: impl AsRef, env: &[(String, String)], current_dir: Option>, - macro_name: String, + macro_name: &str, macro_body: tt::TopSubtree, attribute: Option>, def_site: S, call_site: S, mixed_site: S, - ) -> Result>, String> { + ) -> Result>, PanicMessage> { let snapped_env = self.env; - let expander = - self.expander(lib.as_ref()).map_err(|err| format!("failed to load macro: {err}"))?; + let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage { + message: Some(format!("failed to load macro: {err}")), + })?; let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref)); @@ -99,11 +99,11 @@ impl ProcMacroSrv<'_> { let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) - .name(macro_name.clone()) + .name(macro_name.to_owned()) .spawn_scoped(s, move || { expander .expand( - ¯o_name, + macro_name, server_impl::TopSubtree(macro_body.0.into_vec()), attribute.map(|it| server_impl::TopSubtree(it.0.into_vec())), def_site, @@ -112,12 +112,7 @@ impl ProcMacroSrv<'_> { ) .map(|tt| tt.0) }); - let res = match thread { - Ok(handle) => handle.join(), - Err(e) => return Err(e.to_string()), - }; - - match res { + match thread.unwrap().join() { Ok(res) => res, Err(e) => std::panic::resume_unwind(e), } @@ -132,7 +127,7 @@ impl ProcMacroSrv<'_> { dylib_path: &Utf8Path, ) -> Result, String> { let expander = self.expander(dylib_path)?; - Ok(expander.list_macros()) + Ok(expander.list_macros().map(|(k, v)| (k.to_owned(), v)).collect()) } fn expander(&self, path: &Utf8Path) -> Result, String> { @@ -186,6 +181,8 @@ impl ProcMacroSrvSpan for Span { } } } + +#[derive(Debug, Clone)] pub struct PanicMessage { message: Option, } @@ -265,14 +262,14 @@ impl Drop for EnvChange<'_> { } } - if let Some(dir) = &self.prev_working_dir { - if let Err(err) = std::env::set_current_dir(dir) { - eprintln!( - "Failed to set the current working dir to {}. Error: {:?}", - dir.display(), - err - ) - } + if let Some(dir) = &self.prev_working_dir + && let Err(err) = std::env::set_current_dir(dir) + { + eprintln!( + "Failed to set the current working dir to {}. 
Error: {:?}", + dir.display(), + err + ) } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs index 662f6257642f0..32ad32731ba6c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl.rs @@ -209,7 +209,7 @@ pub(super) fn from_token_tree( token_trees.push(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { spacing: tt::Spacing::Alone, span: literal.span, - char: '-' as char, + char: '-', }))); symbol = Symbol::intern(&symbol.as_str()[1..]); } From 7543395f9540a1d6f9e1f9e390b457e3cfe5ee04 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 31 Jul 2025 10:04:42 +0200 Subject: [PATCH 059/118] Add version command to proc-macro-srv --- .../crates/proc-macro-api/src/process.rs | 27 +++++---- .../crates/proc-macro-srv-cli/build.rs | 48 ++++++++++++++- .../crates/proc-macro-srv-cli/src/main.rs | 24 ++++++-- .../crates/proc-macro-srv-cli/src/version.rs | 58 +++++++++++++++++++ 4 files changed, 139 insertions(+), 18 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/version.rs diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index 278d9cbcda468..fe274a027a80f 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -63,16 +63,25 @@ impl ProcMacroServerProcess { let mut srv = create_srv()?; tracing::info!("sending proc-macro server version check"); match srv.version_check() { - Ok(v) if v > version::CURRENT_API_VERSION => Err(io::Error::other( - format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({}). - This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain." - ,version::CURRENT_API_VERSION - ), - )), + Ok(v) if v > version::CURRENT_API_VERSION => { + #[allow(clippy::disallowed_methods)] + let process_version = Command::new(process_path) + .arg("--version") + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_owned()) + .unwrap_or_else(|_| "unknown version".to_owned()); + Err(io::Error::other(format!( + "Your installed proc-macro server is too new for your rust-analyzer. API version: {}, server version: {process_version}. \ + This will prevent proc-macro expansion from working. 
Please consider updating your rust-analyzer to ensure compatibility with your current toolchain.", + version::CURRENT_API_VERSION + ))) + } Ok(v) => { tracing::info!("Proc-macro server version: {v}"); srv.version = v; - if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT && let Ok(mode) = srv.enable_rust_analyzer_spans() { + if srv.version >= version::RUST_ANALYZER_SPAN_SUPPORT + && let Ok(mode) = srv.enable_rust_analyzer_spans() + { srv.protocol = Protocol::LegacyJson { mode }; } tracing::info!("Proc-macro server protocol: {:?}", srv.protocol); @@ -80,9 +89,7 @@ impl ProcMacroServerProcess { } Err(e) => { tracing::info!(%e, "proc-macro version check failed"); - Err( - io::Error::other(format!("proc-macro server version check failed: {e}")), - ) + Err(io::Error::other(format!("proc-macro server version check failed: {e}"))) } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs index 07f914fece0e2..12e7c8b05bac3 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/build.rs @@ -1,5 +1,49 @@ -//! This teaches cargo about our cfg(rust_analyzer) +//! Construct version in the `commit-hash date channel` format + +use std::{env, path::PathBuf, process::Command}; fn main() { - println!("cargo:rustc-check-cfg=cfg(rust_analyzer)"); + set_rerun(); + set_commit_info(); + println!("cargo::rustc-check-cfg=cfg(rust_analyzer)"); +} + +fn set_rerun() { + println!("cargo:rerun-if-env-changed=CFG_RELEASE"); + + let mut manifest_dir = PathBuf::from( + env::var("CARGO_MANIFEST_DIR").expect("`CARGO_MANIFEST_DIR` is always set by cargo."), + ); + + while manifest_dir.parent().is_some() { + let head_ref = manifest_dir.join(".git/HEAD"); + if head_ref.exists() { + println!("cargo:rerun-if-changed={}", head_ref.display()); + return; + } + + manifest_dir.pop(); + } + + println!("cargo:warning=Could not find `.git/HEAD` from manifest dir!"); +} + +fn set_commit_info() { + #[allow(clippy::disallowed_methods)] + let output = match Command::new("git") + .arg("log") + .arg("-1") + .arg("--date=short") + .arg("--format=%H %h %cd") + .output() + { + Ok(output) if output.status.success() => output, + _ => return, + }; + let stdout = String::from_utf8(output.stdout).unwrap(); + let mut parts = stdout.split_whitespace(); + let mut next = || parts.next().unwrap(); + println!("cargo:rustc-env=RA_COMMIT_HASH={}", next()); + println!("cargo:rustc-env=RA_COMMIT_SHORT_HASH={}", next()); + println!("cargo:rustc-env=RA_COMMIT_DATE={}", next()) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index 97a622e453dd1..662d34865effe 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -2,11 +2,13 @@ //! 
Driver for proc macro server #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))] -#![allow(clippy::print_stderr)] +#![allow(clippy::print_stdout, clippy::print_stderr)] #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +mod version; + #[cfg(any(feature = "sysroot-abi", rust_analyzer))] mod main_loop; use clap::{Command, ValueEnum}; @@ -25,12 +27,22 @@ fn main() -> std::io::Result<()> { std::process::exit(122); } let matches = Command::new("proc-macro-srv") - .args(&[clap::Arg::new("format") - .long("format") - .action(clap::ArgAction::Set) - .default_value("json") - .value_parser(clap::builder::EnumValueParser::::new())]) + .args(&[ + clap::Arg::new("format") + .long("format") + .action(clap::ArgAction::Set) + .default_value("json") + .value_parser(clap::builder::EnumValueParser::::new()), + clap::Arg::new("version") + .long("version") + .action(clap::ArgAction::SetTrue) + .help("Prints the version of the proc-macro-srv"), + ]) .get_matches(); + if matches.get_flag("version") { + println!("rust-analyzer-proc-macro-srv {}", version::version()); + return Ok(()); + } let &format = matches.get_one::("format").expect("format value should always be present"); run(format) diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/version.rs new file mode 100644 index 0000000000000..32499d055d1e0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/version.rs @@ -0,0 +1,58 @@ +//! Code for representing rust-analyzer's release version number. +#![expect(dead_code)] + +use std::fmt; + +/// Information about the git repository where rust-analyzer was built from. +pub(crate) struct CommitInfo { + pub(crate) short_commit_hash: &'static str, + pub(crate) commit_hash: &'static str, + pub(crate) commit_date: &'static str, +} + +/// Cargo's version. +pub(crate) struct VersionInfo { + /// rust-analyzer's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc. + pub(crate) version: &'static str, + /// The release channel we were built for (stable/beta/nightly/dev). + /// + /// `None` if not built via bootstrap. + pub(crate) release_channel: Option<&'static str>, + /// Information about the Git repository we may have been built from. + /// + /// `None` if not built from a git repo. + pub(crate) commit_info: Option, +} + +impl fmt::Display for VersionInfo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.version)?; + + if let Some(ci) = &self.commit_info { + write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?; + }; + Ok(()) + } +} + +/// Returns information about cargo's version. 
+pub(crate) const fn version() -> VersionInfo { + let version = match option_env!("CFG_RELEASE") { + Some(x) => x, + None => "0.0.0", + }; + + let release_channel = option_env!("CFG_RELEASE_CHANNEL"); + let commit_info = match ( + option_env!("RA_COMMIT_SHORT_HASH"), + option_env!("RA_COMMIT_HASH"), + option_env!("RA_COMMIT_DATE"), + ) { + (Some(short_commit_hash), Some(commit_hash), Some(commit_date)) => { + Some(CommitInfo { short_commit_hash, commit_hash, commit_date }) + } + _ => None, + }; + + VersionInfo { version, release_channel, commit_info } +} From a7b01aa04867ba0ee10f767f3819d47c45d9cc30 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 31 Jul 2025 10:30:22 +0200 Subject: [PATCH 060/118] `cargo clippy --fix` --- .../rust-analyzer/crates/cfg/src/cfg_expr.rs | 8 +- .../rust-analyzer/crates/hir-def/src/attr.rs | 8 +- .../crates/hir-def/src/expr_store/lower.rs | 39 +++-- .../hir-def/src/expr_store/lower/asm.rs | 8 +- .../hir-def/src/expr_store/lower/path.rs | 21 +-- .../crates/hir-def/src/expr_store/pretty.rs | 12 +- .../crates/hir-def/src/find_path.rs | 72 ++++---- .../crates/hir-def/src/item_scope.rs | 9 +- .../crates/hir-def/src/item_tree/lower.rs | 23 ++- .../crates/hir-def/src/lang_item.rs | 8 +- .../hir-def/src/macro_expansion_tests/mod.rs | 64 ++++---- .../crates/hir-def/src/nameres/collector.rs | 154 ++++++++---------- .../crates/hir-def/src/resolver.rs | 94 +++++------ .../crates/hir-expand/src/builtin/fn_macro.rs | 11 +- .../crates/hir-expand/src/cfg_process.rs | 8 +- .../crates/hir-expand/src/fixup.rs | 10 +- .../crates/hir-expand/src/lib.rs | 22 ++- .../crates/hir-expand/src/mod_path.rs | 21 +-- .../crates/hir-ty/src/autoderef.rs | 9 +- .../crates/hir-ty/src/builder.rs | 10 +- .../crates/hir-ty/src/chalk_db.rs | 64 ++++---- .../crates/hir-ty/src/consteval.rs | 8 +- .../hir-ty/src/diagnostics/decl_check.rs | 8 +- .../crates/hir-ty/src/diagnostics/expr.rs | 18 +- .../hir-ty/src/diagnostics/match_check.rs | 8 +- .../hir-ty/src/diagnostics/unsafe_check.rs | 17 +- .../crates/hir-ty/src/display.rs | 123 +++++++------- .../crates/hir-ty/src/dyn_compatibility.rs | 25 ++- .../rust-analyzer/crates/hir-ty/src/infer.rs | 47 +++--- .../crates/hir-ty/src/infer/cast.rs | 37 ++--- .../crates/hir-ty/src/infer/closure.rs | 154 ++++++++---------- .../crates/hir-ty/src/infer/coerce.rs | 46 +++--- .../crates/hir-ty/src/infer/expr.rs | 75 ++++----- .../crates/hir-ty/src/infer/mutability.rs | 119 ++++++-------- .../crates/hir-ty/src/infer/pat.rs | 31 ++-- .../rust-analyzer/crates/hir-ty/src/lower.rs | 8 +- .../crates/hir-ty/src/lower/path.rs | 120 +++++++------- .../crates/hir-ty/src/method_resolution.rs | 38 ++--- .../crates/hir-ty/src/mir/borrowck.rs | 5 +- .../crates/hir-ty/src/mir/eval.rs | 73 ++++----- .../crates/hir-ty/src/mir/eval/shim.rs | 76 +++++---- .../crates/hir-ty/src/mir/eval/shim/simd.rs | 11 +- .../crates/hir-ty/src/mir/lower.rs | 60 +++---- .../crates/hir-ty/src/mir/lower/as_place.rs | 17 +- .../hir-ty/src/mir/lower/pattern_matching.rs | 49 +++--- .../rust-analyzer/crates/hir-ty/src/traits.rs | 7 +- .../rust-analyzer/crates/hir-ty/src/utils.rs | 14 +- .../crates/hir/src/diagnostics.rs | 14 +- src/tools/rust-analyzer/crates/hir/src/lib.rs | 80 +++++---- .../rust-analyzer/crates/hir/src/semantics.rs | 93 +++++------ .../crates/hir/src/source_analyzer.rs | 83 +++++----- .../crates/hir/src/term_search.rs | 16 +- .../crates/hir/src/term_search/expr.rs | 8 +- .../src/handlers/add_lifetime_to_type.rs | 24 +-- .../src/handlers/add_missing_impl_members.rs | 14 +- 
.../src/handlers/apply_demorgan.rs | 8 +- .../src/handlers/convert_bool_then.rs | 23 ++- .../src/handlers/convert_closure_to_fn.rs | 57 ++++--- .../src/handlers/convert_from_to_tryfrom.rs | 19 +-- .../src/handlers/convert_into_to_from.rs | 8 +- .../convert_tuple_return_type_to_struct.rs | 8 +- .../convert_tuple_struct_to_named_struct.rs | 2 +- ...ert_two_arm_bool_match_to_matches_macro.rs | 8 +- .../src/handlers/desugar_try_expr.rs | 130 +++++++-------- .../src/handlers/expand_glob_import.rs | 10 +- .../src/handlers/extract_function.rs | 101 ++++++------ .../src/handlers/extract_module.rs | 81 +++++---- .../extract_struct_from_enum_variant.rs | 12 +- .../src/handlers/extract_type_alias.rs | 42 +++-- .../src/handlers/extract_variable.rs | 15 +- .../generate_documentation_template.rs | 18 +- .../src/handlers/generate_fn_type_alias.rs | 8 +- .../src/handlers/generate_function.rs | 8 +- .../src/handlers/generate_getter_or_setter.rs | 11 +- .../ide-assists/src/handlers/generate_impl.rs | 10 +- .../src/handlers/generate_trait_from_impl.rs | 22 +-- .../ide-assists/src/handlers/inline_call.rs | 49 +++--- .../src/handlers/move_const_to_impl.rs | 8 +- .../src/handlers/pull_assignment_up.rs | 16 +- .../ide-assists/src/handlers/raw_string.rs | 18 +- .../replace_qualified_name_with_use.rs | 8 +- .../src/handlers/unnecessary_async.rs | 8 +- .../src/handlers/unwrap_return_type.rs | 24 +-- .../ide-assists/src/handlers/unwrap_tuple.rs | 8 +- .../src/handlers/wrap_return_type.rs | 40 ++--- .../src/handlers/wrap_unwrap_cfg_attr.rs | 22 ++- .../crates/ide-assists/src/utils.rs | 48 +++--- .../crates/ide-completion/src/completions.rs | 17 +- .../ide-completion/src/completions/dot.rs | 11 +- .../src/completions/fn_param.rs | 8 +- .../src/completions/item_list/trait_impl.rs | 92 +++++------ .../ide-completion/src/completions/mod_.rs | 17 +- .../ide-completion/src/completions/pattern.rs | 21 ++- .../ide-completion/src/completions/postfix.rs | 153 ++++++++--------- .../ide-completion/src/completions/use_.rs | 8 +- .../ide-completion/src/completions/vis.rs | 10 +- .../ide-completion/src/context/analysis.rs | 140 +++++++--------- .../crates/ide-completion/src/item.rs | 8 +- .../crates/ide-completion/src/lib.rs | 14 +- .../crates/ide-completion/src/render.rs | 108 ++++++------ .../ide-completion/src/render/const_.rs | 8 +- .../ide-completion/src/render/function.rs | 34 ++-- .../ide-completion/src/render/type_alias.rs | 8 +- .../rust-analyzer/crates/ide-db/src/defs.rs | 64 ++++---- .../crates/ide-db/src/helpers.rs | 10 +- .../crates/ide-db/src/imports/insert_use.rs | 45 +++-- .../crates/ide-db/src/path_transform.rs | 100 ++++++------ .../rust-analyzer/crates/ide-db/src/rename.rs | 22 +-- .../rust-analyzer/crates/ide-db/src/search.rs | 83 +++++----- .../crates/ide-db/src/symbol_index.rs | 16 +- .../src/syntax_helpers/format_string.rs | 10 +- .../ide-db/src/syntax_helpers/node_ext.rs | 23 ++- .../ide-db/src/use_trivial_constructor.rs | 20 +-- .../src/handlers/json_is_not_rust.rs | 50 +++--- .../src/handlers/missing_fields.rs | 11 +- .../src/handlers/mutability_errors.rs | 24 +-- .../src/handlers/unlinked_file.rs | 14 +- .../crates/ide-diagnostics/src/lib.rs | 54 +++--- .../rust-analyzer/crates/ide-ssr/src/lib.rs | 21 ++- .../crates/ide-ssr/src/matching.rs | 101 ++++++------ .../crates/ide-ssr/src/replacing.rs | 28 ++-- .../crates/ide-ssr/src/resolving.rs | 61 +++---- .../crates/ide-ssr/src/search.rs | 27 ++- .../crates/ide/src/annotations.rs | 8 +- .../crates/ide/src/expand_macro.rs | 16 +- 
.../crates/ide/src/extend_selection.rs | 54 +++--- .../crates/ide/src/folding_ranges.rs | 88 +++++----- .../crates/ide/src/goto_definition.rs | 28 ++-- .../crates/ide/src/highlight_related.rs | 45 +++-- .../rust-analyzer/crates/ide/src/hover.rs | 13 +- .../crates/ide/src/hover/render.rs | 116 ++++++------- .../crates/ide/src/inlay_hints.rs | 12 +- .../crates/ide/src/inlay_hints/adjustment.rs | 8 +- .../crates/ide/src/inlay_hints/bind_pat.rs | 8 +- .../crates/ide/src/inlay_hints/chaining.rs | 11 +- .../ide/src/inlay_hints/closing_brace.rs | 10 +- .../crates/ide/src/inlay_hints/closure_ret.rs | 8 +- .../ide/src/inlay_hints/extern_block.rs | 8 +- .../ide/src/inlay_hints/generic_param.rs | 8 +- .../ide/src/inlay_hints/implicit_static.rs | 43 ++--- .../crates/ide/src/inlay_hints/lifetime.rs | 50 +++--- .../crates/ide/src/inlay_hints/param_name.rs | 8 +- .../crates/ide/src/join_lines.rs | 26 +-- .../crates/ide/src/parent_module.rs | 11 +- .../rust-analyzer/crates/ide/src/rename.rs | 8 +- .../rust-analyzer/crates/ide/src/runnables.rs | 34 ++-- .../crates/ide/src/signature_help.rs | 19 +-- .../crates/ide/src/static_index.rs | 8 +- .../ide/src/syntax_highlighting/highlight.rs | 39 +++-- .../crates/load-cargo/src/lib.rs | 8 +- .../rust-analyzer/crates/mbe/src/benchmark.rs | 32 ++-- .../crates/mbe/src/expander/matcher.rs | 12 +- .../crates/parser/src/grammar/expressions.rs | 69 ++++---- .../rust-analyzer/crates/parser/src/input.rs | 2 +- .../crates/parser/src/shortcuts.rs | 8 +- .../crates/query-group-macro/src/lib.rs | 18 +- .../crates/rust-analyzer/src/bin/main.rs | 12 +- .../rust-analyzer/src/cli/analysis_stats.rs | 111 ++++++------- .../rust-analyzer/src/cli/progress_report.rs | 10 +- .../rust-analyzer/src/cli/rustc_tests.rs | 8 +- .../crates/rust-analyzer/src/config.rs | 21 ++- .../src/config/patch_old_style.rs | 24 +-- .../rust-analyzer/src/diagnostics/to_proto.rs | 14 +- .../crates/rust-analyzer/src/flycheck.rs | 11 +- .../crates/rust-analyzer/src/global_state.rs | 26 +-- .../rust-analyzer/src/handlers/dispatch.rs | 8 +- .../src/handlers/notification.rs | 37 ++--- .../rust-analyzer/src/handlers/request.rs | 150 ++++++++--------- .../crates/rust-analyzer/src/main_loop.rs | 64 ++++---- .../crates/rust-analyzer/src/reload.rs | 14 +- .../rust-analyzer/crates/span/src/map.rs | 14 +- .../crates/syntax-bridge/src/lib.rs | 22 +-- .../src/prettify_macro_expansion.rs | 9 +- .../crates/syntax-bridge/src/tests.rs | 9 +- .../syntax-bridge/src/to_parser_input.rs | 20 +-- .../crates/syntax/src/ast/edit.rs | 24 +-- .../crates/syntax/src/ast/edit_in_place.rs | 41 +++-- .../crates/syntax/src/ast/prec.rs | 24 +-- .../crates/syntax/src/syntax_editor.rs | 8 +- .../rust-analyzer/crates/syntax/src/ted.rs | 42 ++--- .../crates/syntax/src/validation.rs | 92 +++++------ .../crates/test-fixture/src/lib.rs | 12 +- src/tools/rust-analyzer/crates/tt/src/lib.rs | 16 +- .../crates/vfs-notify/src/lib.rs | 85 +++++----- src/tools/rust-analyzer/xtask/src/codegen.rs | 10 +- .../rust-analyzer/xtask/src/publish/notes.rs | 138 ++++++++-------- 186 files changed, 3073 insertions(+), 3331 deletions(-) diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs index aed00aa9fc447..f83c21eb8d64a 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs @@ -134,10 +134,10 @@ fn next_cfg_expr(it: &mut tt::iter::TtIter<'_, S>) -> Option { }; // Eat comma separator - if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = 
it.peek() { - if punct.char == ',' { - it.next(); - } + if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = it.peek() + && punct.char == ',' + { + it.next(); } Some(ret) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index b509e69b0d37b..53250510f875c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -377,10 +377,10 @@ fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option { let mut align = None; if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { tts.next(); - if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() { - if let Ok(a) = lit.symbol.as_str().parse() { - align = Align::from_bytes(a).ok(); - } + if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() + && let Ok(a) = lit.symbol.as_str().parse() + { + align = Align::from_bytes(a).ok(); } } ReprOptions { align, ..Default::default() } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index abd1382801dda..3b9281ffb9c12 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -1487,13 +1487,13 @@ impl ExprCollector<'_> { ast::Expr::UnderscoreExpr(_) => self.alloc_pat_from_expr(Pat::Wild, syntax_ptr), ast::Expr::ParenExpr(e) => { // We special-case `(..)` for consistency with patterns. - if let Some(ast::Expr::RangeExpr(range)) = e.expr() { - if range.is_range_full() { - return Some(self.alloc_pat_from_expr( - Pat::Tuple { args: Box::default(), ellipsis: Some(0) }, - syntax_ptr, - )); - } + if let Some(ast::Expr::RangeExpr(range)) = e.expr() + && range.is_range_full() + { + return Some(self.alloc_pat_from_expr( + Pat::Tuple { args: Box::default(), ellipsis: Some(0) }, + syntax_ptr, + )); } return e.expr().and_then(|expr| self.maybe_collect_expr_as_pat(&expr)); } @@ -2569,19 +2569,18 @@ impl ExprCollector<'_> { } } RibKind::MacroDef(macro_id) => { - if let Some((parent_ctx, label_macro_id)) = hygiene_info { - if label_macro_id == **macro_id { - // A macro is allowed to refer to labels from before its declaration. - // Therefore, if we got to the rib of its declaration, give up its hygiene - // and use its parent expansion. - - hygiene_id = - HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db)); - hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| { - let expansion = self.db.lookup_intern_macro_call(expansion.into()); - (parent_ctx.parent(self.db), expansion.def) - }); - } + if let Some((parent_ctx, label_macro_id)) = hygiene_info + && label_macro_id == **macro_id + { + // A macro is allowed to refer to labels from before its declaration. + // Therefore, if we got to the rib of its declaration, give up its hygiene + // and use its parent expansion. 
+ + hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db)); + hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| { + let expansion = self.db.lookup_intern_macro_call(expansion.into()); + (parent_ctx.parent(self.db), expansion.def) + }); } } _ => {} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs index 3bc4afb5c8ac3..230d1c9346362 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs @@ -259,10 +259,10 @@ impl ExprCollector<'_> { } }; - if let Some(operand_idx) = operand_idx { - if let Some(position_span) = to_span(arg.position_span) { - mappings.push((position_span, operand_idx)); - } + if let Some(operand_idx) = operand_idx + && let Some(position_span) = to_span(arg.position_span) + { + mappings.push((position_span, operand_idx)); } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs index be006c98a5827..579465e10f932 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs @@ -211,16 +211,17 @@ pub(super) fn lower_path( // Basically, even in rustc it is quite hacky: // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 // We follow what it did anyway :) - if segments.len() == 1 && kind == PathKind::Plain { - if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range()); - if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) { - if collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner { - kind = match resolve_crate_root(collector.db, syn_ctxt) { - Some(crate_root) => PathKind::DollarCrate(crate_root), - None => PathKind::Crate, - } - } + if segments.len() == 1 + && kind == PathKind::Plain + && let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) + { + let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range()); + if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) + && collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner + { + kind = match resolve_crate_root(collector.db, syn_ctxt) { + Some(crate_root) => PathKind::DollarCrate(crate_root), + None => PathKind::Crate, } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs index f1b011333d94e..b81dcc1fe96df 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs @@ -900,14 +900,12 @@ impl Printer<'_> { let field_name = arg.name.display(self.db, edition).to_string(); let mut same_name = false; - if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] { - if let Binding { name, mode: BindingAnnotation::Unannotated, .. } = + if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] + && let Binding { name, mode: BindingAnnotation::Unannotated, .. 
} = &self.store.assert_expr_only().bindings[*id] - { - if name.as_str() == field_name { - same_name = true; - } - } + && name.as_str() == field_name + { + same_name = true; } w!(p, "{}", field_name); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index dccfff002f23d..faa0ef8ceec7b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -107,11 +107,11 @@ struct FindPathCtx<'db> { /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option { // - if the item is a module, jump straight to module search - if !ctx.is_std_item { - if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { - return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) - .map(|choice| choice.path); - } + if !ctx.is_std_item + && let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item + { + return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) + .map(|choice| choice.path); } let may_be_in_scope = match ctx.prefix { @@ -226,15 +226,15 @@ fn find_path_for_module( } // - if the module can be referenced as self, super or crate, do that - if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) { - if ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate { - return Some(Choice { - path: ModPath::from_segments(kind, None), - path_text_len: path_kind_len(kind), - stability: Stable, - prefer_due_to_prelude: false, - }); - } + if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) + && (ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate) + { + return Some(Choice { + path: ModPath::from_segments(kind, None), + path_text_len: path_kind_len(kind), + stability: Stable, + prefer_due_to_prelude: false, + }); } // - if the module is in the prelude, return it by that path @@ -604,29 +604,29 @@ fn find_local_import_locations( &def_map[module.local_id] }; - if let Some((name, vis, declared)) = data.scope.name_of(item) { - if vis.is_visible_from(db, from) { - let is_pub_or_explicit = match vis { - Visibility::Module(_, VisibilityExplicitness::Explicit) => { - cov_mark::hit!(explicit_private_imports); - true - } - Visibility::Module(_, VisibilityExplicitness::Implicit) => { - cov_mark::hit!(discount_private_imports); - false - } - Visibility::PubCrate(_) => true, - Visibility::Public => true, - }; - - // Ignore private imports unless they are explicit. these could be used if we are - // in a submodule of this module, but that's usually not - // what the user wants; and if this module can import - // the item and we're a submodule of it, so can we. - // Also this keeps the cached data smaller. - if declared || is_pub_or_explicit { - cb(visited_modules, name, module); + if let Some((name, vis, declared)) = data.scope.name_of(item) + && vis.is_visible_from(db, from) + { + let is_pub_or_explicit = match vis { + Visibility::Module(_, VisibilityExplicitness::Explicit) => { + cov_mark::hit!(explicit_private_imports); + true } + Visibility::Module(_, VisibilityExplicitness::Implicit) => { + cov_mark::hit!(discount_private_imports); + false + } + Visibility::PubCrate(_) => true, + Visibility::Public => true, + }; + + // Ignore private imports unless they are explicit. 
these could be used if we are + // in a submodule of this module, but that's usually not + // what the user wants; and if this module can import + // the item and we're a submodule of it, so can we. + // Also this keeps the cached data smaller. + if declared || is_pub_or_explicit { + cb(visited_modules, name, module); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index efa4399468501..8f526d1a2369a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -510,12 +510,11 @@ impl ItemScope { id: AttrId, idx: usize, ) { - if let Some(derives) = self.derive_macros.get_mut(&adt) { - if let Some(DeriveMacroInvocation { derive_call_ids, .. }) = + if let Some(derives) = self.derive_macros.get_mut(&adt) + && let Some(DeriveMacroInvocation { derive_call_ids, .. }) = derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id) - { - derive_call_ids[idx] = Some(call); - } + { + derive_call_ids[idx] = Some(call); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 5ab61c89394bf..032b287cd6a82 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -83,12 +83,12 @@ impl<'a> Ctx<'a> { .flat_map(|item| self.lower_mod_item(&item)) .collect(); - if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() { - if let Some(call) = tail_macro.macro_call() { - cov_mark::hit!(macro_stmt_with_trailing_macro_expr); - if let Some(mod_item) = self.lower_mod_item(&call.into()) { - self.top_level.push(mod_item); - } + if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() + && let Some(call) = tail_macro.macro_call() + { + cov_mark::hit!(macro_stmt_with_trailing_macro_expr); + if let Some(mod_item) = self.lower_mod_item(&call.into()) { + self.top_level.push(mod_item); } } @@ -112,12 +112,11 @@ impl<'a> Ctx<'a> { _ => None, }) .collect(); - if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() { - if let Some(call) = expr.macro_call() { - if let Some(mod_item) = self.lower_mod_item(&call.into()) { - self.top_level.push(mod_item); - } - } + if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() + && let Some(call) = expr.macro_call() + && let Some(mod_item) = self.lower_mod_item(&call.into()) + { + self.top_level.push(mod_item); } self.tree.vis.arena = self.visibilities.into_iter().collect(); self.tree.top_level = self.top_level.into_boxed_slice(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 750308026eec6..d431f2140165e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -218,10 +218,10 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option for (_, module_data) in crate_def_map.modules() { for def in module_data.scope.declarations() { - if let ModuleDefId::TraitId(trait_) = def { - if db.attrs(trait_.into()).has_doc_notable_trait() { - traits.push(trait_); - } + if let ModuleDefId::TraitId(trait_) = def + && db.attrs(trait_.into()).has_doc_notable_trait() + { + traits.push(trait_); } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs 
b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 5e95b061399af..e8ae499d27b26 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -221,46 +221,42 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream _ => None, }; - if let Some(src) = src { - if let Some(file_id) = src.file_id.macro_file() { - if let MacroKind::Derive - | MacroKind::DeriveBuiltIn - | MacroKind::Attr - | MacroKind::AttrBuiltIn = file_id.kind(&db) - { - let call = file_id.call_node(&db); - let mut show_spans = false; - let mut show_ctxt = false; - for comment in - call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) - { - show_spans |= comment.to_string().contains("+spans"); - show_ctxt |= comment.to_string().contains("+syntaxctxt"); - } - let pp = pretty_print_macro_expansion( - src.value, - db.span_map(src.file_id).as_ref(), - show_spans, - show_ctxt, - ); - format_to!(expanded_text, "\n{}", pp) - } + if let Some(src) = src + && let Some(file_id) = src.file_id.macro_file() + && let MacroKind::Derive + | MacroKind::DeriveBuiltIn + | MacroKind::Attr + | MacroKind::AttrBuiltIn = file_id.kind(&db) + { + let call = file_id.call_node(&db); + let mut show_spans = false; + let mut show_ctxt = false; + for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) { + show_spans |= comment.to_string().contains("+spans"); + show_ctxt |= comment.to_string().contains("+syntaxctxt"); } + let pp = pretty_print_macro_expansion( + src.value, + db.span_map(src.file_id).as_ref(), + show_spans, + show_ctxt, + ); + format_to!(expanded_text, "\n{}", pp) } } for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); - if let Some(macro_file) = src.file_id.macro_file() { - if let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db) { - let pp = pretty_print_macro_expansion( - src.value.syntax().clone(), - db.span_map(macro_file.into()).as_ref(), - false, - false, - ); - format_to!(expanded_text, "\n{}", pp) - } + if let Some(macro_file) = src.file_id.macro_file() + && let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db) + { + let pp = pretty_print_macro_expansion( + src.value.syntax().clone(), + db.span_map(macro_file.into()).as_ref(), + false, + false, + ); + format_to!(expanded_text, "\n{}", pp) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 0c3274d849ad8..267c4451b9d71 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -261,20 +261,20 @@ impl<'db> DefCollector<'db> { // Process other crate-level attributes. 
for attr in &*attrs { - if let Some(cfg) = attr.cfg() { - if self.cfg_options.check(&cfg) == Some(false) { - process = false; - break; - } + if let Some(cfg) = attr.cfg() + && self.cfg_options.check(&cfg) == Some(false) + { + process = false; + break; } let Some(attr_name) = attr.path.as_ident() else { continue }; match () { () if *attr_name == sym::recursion_limit => { - if let Some(limit) = attr.string_value() { - if let Ok(limit) = limit.as_str().parse() { - crate_data.recursion_limit = Some(limit); - } + if let Some(limit) = attr.string_value() + && let Ok(limit) = limit.as_str().parse() + { + crate_data.recursion_limit = Some(limit); } } () if *attr_name == sym::crate_type => { @@ -1188,56 +1188,44 @@ impl<'db> DefCollector<'db> { // Multiple globs may import the same item and they may override visibility from // previously resolved globs. Handle overrides here and leave the rest to // `ItemScope::push_res_with_import()`. - if let Some(def) = defs.types { - if let Some(prev_def) = prev_defs.types { - if def.def == prev_def.def - && self.from_glob_import.contains_type(module_id, name.clone()) - && def.vis != prev_def.vis - && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) - { - changed = true; - // This import is being handled here, don't pass it down to - // `ItemScope::push_res_with_import()`. - defs.types = None; - self.def_map.modules[module_id] - .scope - .update_visibility_types(name, def.vis); - } - } + if let Some(def) = defs.types + && let Some(prev_def) = prev_defs.types + && def.def == prev_def.def + && self.from_glob_import.contains_type(module_id, name.clone()) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) + { + changed = true; + // This import is being handled here, don't pass it down to + // `ItemScope::push_res_with_import()`. + defs.types = None; + self.def_map.modules[module_id].scope.update_visibility_types(name, def.vis); } - if let Some(def) = defs.values { - if let Some(prev_def) = prev_defs.values { - if def.def == prev_def.def - && self.from_glob_import.contains_value(module_id, name.clone()) - && def.vis != prev_def.vis - && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) - { - changed = true; - // See comment above. - defs.values = None; - self.def_map.modules[module_id] - .scope - .update_visibility_values(name, def.vis); - } - } + if let Some(def) = defs.values + && let Some(prev_def) = prev_defs.values + && def.def == prev_def.def + && self.from_glob_import.contains_value(module_id, name.clone()) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) + { + changed = true; + // See comment above. + defs.values = None; + self.def_map.modules[module_id].scope.update_visibility_values(name, def.vis); } - if let Some(def) = defs.macros { - if let Some(prev_def) = prev_defs.macros { - if def.def == prev_def.def - && self.from_glob_import.contains_macro(module_id, name.clone()) - && def.vis != prev_def.vis - && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) - { - changed = true; - // See comment above. - defs.macros = None; - self.def_map.modules[module_id] - .scope - .update_visibility_macros(name, def.vis); - } - } + if let Some(def) = defs.macros + && let Some(prev_def) = prev_defs.macros + && def.def == prev_def.def + && self.from_glob_import.contains_macro(module_id, name.clone()) + && def.vis != prev_def.vis + && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) + { + changed = true; + // See comment above. 
+ defs.macros = None; + self.def_map.modules[module_id].scope.update_visibility_macros(name, def.vis); } } @@ -1392,15 +1380,14 @@ impl<'db> DefCollector<'db> { Resolved::Yes }; - if let Some(ident) = path.as_ident() { - if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) { - if helpers.iter().any(|(it, ..)| it == ident) { - cov_mark::hit!(resolved_derive_helper); - // Resolved to derive helper. Collect the item's attributes again, - // starting after the derive helper. - return recollect_without(self); - } - } + if let Some(ident) = path.as_ident() + && let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) + && helpers.iter().any(|(it, ..)| it == ident) + { + cov_mark::hit!(resolved_derive_helper); + // Resolved to derive helper. Collect the item's attributes again, + // starting after the derive helper. + return recollect_without(self); } let def = match resolver_def_id(path) { @@ -1729,12 +1716,12 @@ impl ModCollector<'_, '_> { let mut process_mod_item = |item: ModItemId| { let attrs = self.item_tree.attrs(db, krate, item.ast_id()); - if let Some(cfg) = attrs.cfg() { - if !self.is_cfg_enabled(&cfg) { - let ast_id = item.ast_id().erase(); - self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg); - return; - } + if let Some(cfg) = attrs.cfg() + && !self.is_cfg_enabled(&cfg) + { + let ast_id = item.ast_id().erase(); + self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg); + return; } if let Err(()) = self.resolve_attributes(&attrs, item, container) { @@ -1871,14 +1858,13 @@ impl ModCollector<'_, '_> { if self.def_collector.def_map.block.is_none() && self.def_collector.is_proc_macro && self.module_id == DefMap::ROOT + && let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { - if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { - self.def_collector.export_proc_macro( - proc_macro, - InFile::new(self.file_id(), id), - fn_id, - ); - } + self.def_collector.export_proc_macro( + proc_macro, + InFile::new(self.file_id(), id), + fn_id, + ); } update_def(self.def_collector, fn_id.into(), &it.name, vis, false); @@ -2419,13 +2405,13 @@ impl ModCollector<'_, '_> { macro_id, &self.item_tree[mac.visibility], ); - if let Some(helpers) = helpers_opt { - if self.def_collector.def_map.block.is_none() { - Arc::get_mut(&mut self.def_collector.def_map.data) - .unwrap() - .exported_derives - .insert(macro_id.into(), helpers); - } + if let Some(helpers) = helpers_opt + && self.def_collector.def_map.block.is_none() + { + Arc::get_mut(&mut self.def_collector.def_map.data) + .unwrap() + .exported_derives + .insert(macro_id.into(), helpers); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 316ad5dae69df..a10990e6a8f9f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -228,15 +228,15 @@ impl<'db> Resolver<'db> { ResolvePathResultPrefixInfo::default(), )); } - } else if let &GenericDefId::AdtId(adt) = def { - if *first_name == sym::Self_ { - return Some(( - TypeNs::AdtSelfType(adt), - remaining_idx(), - None, - ResolvePathResultPrefixInfo::default(), - )); - } + } else if let &GenericDefId::AdtId(adt) = def + && *first_name == sym::Self_ + { + return Some(( + TypeNs::AdtSelfType(adt), + remaining_idx(), + None, + ResolvePathResultPrefixInfo::default(), + )); } if let Some(id) = params.find_type_by_name(first_name, *def) { return Some(( @@ -401,13 
+401,13 @@ impl<'db> Resolver<'db> { handle_macro_def_scope(db, &mut hygiene_id, &mut hygiene_info, macro_id) } Scope::GenericParams { params, def } => { - if let &GenericDefId::ImplId(impl_) = def { - if *first_name == sym::Self_ { - return Some(( - ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), - ResolvePathResultPrefixInfo::default(), - )); - } + if let &GenericDefId::ImplId(impl_) = def + && *first_name == sym::Self_ + { + return Some(( + ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), + ResolvePathResultPrefixInfo::default(), + )); } if let Some(id) = params.find_const_by_name(first_name, *def) { let val = ValueNs::GenericParam(id); @@ -436,14 +436,14 @@ impl<'db> Resolver<'db> { ResolvePathResultPrefixInfo::default(), )); } - } else if let &GenericDefId::AdtId(adt) = def { - if *first_name == sym::Self_ { - let ty = TypeNs::AdtSelfType(adt); - return Some(( - ResolveValueResult::Partial(ty, 1, None), - ResolvePathResultPrefixInfo::default(), - )); - } + } else if let &GenericDefId::AdtId(adt) = def + && *first_name == sym::Self_ + { + let ty = TypeNs::AdtSelfType(adt); + return Some(( + ResolveValueResult::Partial(ty, 1, None), + ResolvePathResultPrefixInfo::default(), + )); } if let Some(id) = params.find_type_by_name(first_name, *def) { let ty = TypeNs::GenericParam(id); @@ -469,13 +469,14 @@ impl<'db> Resolver<'db> { // If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back // to resolving to the primitive type, to allow this to still work in the presence of // `use core::u16;`. - if path.kind == PathKind::Plain && n_segments > 1 { - if let Some(builtin) = BuiltinType::by_name(first_name) { - return Some(( - ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None), - ResolvePathResultPrefixInfo::default(), - )); - } + if path.kind == PathKind::Plain + && n_segments > 1 + && let Some(builtin) = BuiltinType::by_name(first_name) + { + return Some(( + ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None), + ResolvePathResultPrefixInfo::default(), + )); } None @@ -660,12 +661,11 @@ impl<'db> Resolver<'db> { Scope::BlockScope(m) => traits.extend(m.def_map[m.module_id].scope.traits()), &Scope::GenericParams { def: GenericDefId::ImplId(impl_), .. } => { let impl_data = db.impl_signature(impl_); - if let Some(target_trait) = impl_data.target_trait { - if let Some(TypeNs::TraitId(trait_)) = self + if let Some(target_trait) = impl_data.target_trait + && let Some(TypeNs::TraitId(trait_)) = self .resolve_path_in_type_ns_fully(db, &impl_data.store[target_trait.path]) - { - traits.insert(trait_); - } + { + traits.insert(trait_); } } _ => (), @@ -918,17 +918,17 @@ fn handle_macro_def_scope( hygiene_info: &mut Option<(SyntaxContext, MacroDefId)>, macro_id: &MacroDefId, ) { - if let Some((parent_ctx, label_macro_id)) = hygiene_info { - if label_macro_id == macro_id { - // A macro is allowed to refer to variables from before its declaration. - // Therefore, if we got to the rib of its declaration, give up its hygiene - // and use its parent expansion. - *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db)); - *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| { - let expansion = db.lookup_intern_macro_call(expansion.into()); - (parent_ctx.parent(db), expansion.def) - }); - } + if let Some((parent_ctx, label_macro_id)) = hygiene_info + && label_macro_id == macro_id + { + // A macro is allowed to refer to variables from before its declaration. 
+ // Therefore, if we got to the rib of its declaration, give up its hygiene + // and use its parent expansion. + *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db)); + *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| { + let expansion = db.lookup_intern_macro_call(expansion.into()); + (parent_ctx.parent(db), expansion.def) + }); } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 4a9af01091f2e..58ab7f470c40e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -555,12 +555,11 @@ fn concat_expand( // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses // to ensure the right parsing order, so skip the parentheses here. Ideally we'd // implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623 - if let TtElement::Subtree(subtree, subtree_iter) = &t { - if let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens() { - if subtree.delimiter.kind == tt::DelimiterKind::Parenthesis { - t = TtElement::Leaf(tt); - } - } + if let TtElement::Subtree(subtree, subtree_iter) = &t + && let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens() + && subtree.delimiter.kind == tt::DelimiterKind::Parenthesis + { + t = TtElement::Leaf(tt); } match t { TtElement::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs index c6ea4a3a33db8..d5ebd6ee19f5c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs @@ -334,10 +334,10 @@ where _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))), }, }; - if let Some(NodeOrToken::Token(element)) = iter.peek() { - if element.kind() == syntax::T![,] { - iter.next(); - } + if let Some(NodeOrToken::Token(element)) = iter.peek() + && element.kind() == syntax::T![,] + { + iter.next(); } result } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 4a4a3e52aea43..fe77e1565987f 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -280,8 +280,8 @@ pub(crate) fn fixup_syntax( } }, ast::RecordExprField(it) => { - if let Some(colon) = it.colon_token() { - if it.name_ref().is_some() && it.expr().is_none() { + if let Some(colon) = it.colon_token() + && it.name_ref().is_some() && it.expr().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { sym: sym::__ra_fixup, @@ -290,11 +290,10 @@ pub(crate) fn fixup_syntax( }) ]); } - } }, ast::Path(it) => { - if let Some(colon) = it.coloncolon_token() { - if it.segment().is_none() { + if let Some(colon) = it.coloncolon_token() + && it.segment().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { sym: sym::__ra_fixup, @@ -303,7 +302,6 @@ pub(crate) fn fixup_syntax( }) ]); } - } }, ast::ClosureExpr(it) => { if it.body().is_none() { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index ac61b22009706..472ec83ffef5b 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -365,12 +365,11 @@ impl HirFileId { 
HirFileId::FileId(id) => break id, HirFileId::MacroFile(file) => { let loc = db.lookup_intern_macro_call(file); - if loc.def.is_include() { - if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind { - if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) { - break it; - } - } + if loc.def.is_include() + && let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind + && let Ok(it) = include_input_to_file_id(db, file, &eager.arg) + { + break it; } self = loc.kind.file_id(); } @@ -648,12 +647,11 @@ impl MacroCallLoc { db: &dyn ExpandDatabase, macro_call_id: MacroCallId, ) -> Option { - if self.def.is_include() { - if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind { - if let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg) { - return Some(it); - } - } + if self.def.is_include() + && let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind + && let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg) + { + return Some(it); } None diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 9f1e3879e1eeb..d84d978cdb7ed 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -273,16 +273,17 @@ fn convert_path( // Basically, even in rustc it is quite hacky: // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 // We follow what it did anyway :) - if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { - if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - let syn_ctx = span_for_range(segment.syntax().text_range()); - if let Some(macro_call_id) = syn_ctx.outer_expn(db) { - if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner { - mod_path.kind = match resolve_crate_root(db, syn_ctx) { - Some(crate_root) => PathKind::DollarCrate(crate_root), - None => PathKind::Crate, - } - } + if mod_path.segments.len() == 1 + && mod_path.kind == PathKind::Plain + && let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) + { + let syn_ctx = span_for_range(segment.syntax().text_range()); + if let Some(macro_call_id) = syn_ctx.outer_expn(db) + && db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner + { + mod_path.kind = match resolve_crate_root(db, syn_ctx) { + Some(crate_root) => PathKind::DollarCrate(crate_root), + None => PathKind::Crate, } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index cc8f7bf04a5cb..26ca7fb9a15ec 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -197,10 +197,11 @@ pub(crate) fn deref_by_trait( // effectively bump the MSRV of rust-analyzer to 1.84 due to 1.83 and below lacking the // blanked impl on `Deref`. #[expect(clippy::overly_complex_bool_expr)] - if use_receiver_trait && false { - if let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) { - return Some(receiver); - } + if use_receiver_trait + && false + && let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) + { + return Some(receiver); } // Old rustc versions might not have `Receiver` trait. 
// Fallback to `Deref` if they don't diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs index 77d15a73af6ff..8af8fb73f344e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs @@ -309,11 +309,11 @@ impl TyBuilder { if let Some(defaults) = defaults.get(self.vec.len()..) { for default_ty in defaults { // NOTE(skip_binders): we only check if the arg type is error type. - if let Some(x) = default_ty.skip_binders().ty(Interner) { - if x.is_unknown() { - self.vec.push(fallback().cast(Interner)); - continue; - } + if let Some(x) = default_ty.skip_binders().ty(Interner) + && x.is_unknown() + { + self.vec.push(fallback().cast(Interner)); + continue; } // Each default can only depend on the previous parameters. self.vec.push(default_ty.clone().substitute(Interner, &*self.vec).cast(Interner)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 26b635298a651..3ba7c93d4fb76 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -83,34 +83,34 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None }) } fn discriminant_type(&self, ty: chalk_ir::Ty) -> chalk_ir::Ty { - if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) { - if let hir_def::AdtId::EnumId(e) = id.0 { - let enum_data = self.db.enum_signature(e); - let ty = enum_data.repr.unwrap_or_default().discr_type(); - return chalk_ir::TyKind::Scalar(match ty { - hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { - true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize), - false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize), - }, - hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed { - true => chalk_ir::Scalar::Int(match size { - hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8, - hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16, - hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32, - hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64, - hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128, - }), - false => chalk_ir::Scalar::Uint(match size { - hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8, - hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16, - hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32, - hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64, - hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128, - }), - }, - }) - .intern(Interner); - } + if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) + && let hir_def::AdtId::EnumId(e) = id.0 + { + let enum_data = self.db.enum_signature(e); + let ty = enum_data.repr.unwrap_or_default().discr_type(); + return chalk_ir::TyKind::Scalar(match ty { + hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { + true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize), + false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize), + }, + hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed { + true => chalk_ir::Scalar::Int(match size { + hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8, + hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16, + hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32, + hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64, + hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128, + }), + false => 
chalk_ir::Scalar::Uint(match size { + hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8, + hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16, + hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32, + hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64, + hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128, + }), + }, + }) + .intern(Interner); } chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner) } @@ -142,10 +142,10 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { ) -> Option { if let TyKind::BoundVar(bv) = ty.kind(Interner) { let binders = binders.as_slice(Interner); - if bv.debruijn == DebruijnIndex::INNERMOST { - if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind { - return Some(tk); - } + if bv.debruijn == DebruijnIndex::INNERMOST + && let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind + { + return Some(tk); } } None diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 14b9cd203f60a..f30ec839a0096 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -342,10 +342,10 @@ pub(crate) fn eval_to_const( return c; } } - if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) { - if let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None) { - return result; - } + if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) + && let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None) + { + return result; } unknown_const(infer[expr].clone()) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 40fe3073cf2cd..0815e62f87eef 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -657,10 +657,10 @@ impl<'a> DeclValidator<'a> { } fn is_trait_impl_container(&self, container_id: ItemContainerId) -> bool { - if let ItemContainerId::ImplId(impl_id) = container_id { - if self.db.impl_trait(impl_id).is_some() { - return true; - } + if let ItemContainerId::ImplId(impl_id) = container_id + && self.db.impl_trait(impl_id).is_some() + { + return true; } false } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index cc531f076dd1f..b26bd2b8fa9c4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -528,15 +528,15 @@ impl FilterMapNextChecker { return None; } - if *function_id == self.next_function_id? { - if let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id { - let is_dyn_trait = self - .prev_receiver_ty - .as_ref() - .is_some_and(|it| it.strip_references().dyn_trait().is_some()); - if *receiver_expr_id == prev_filter_map_expr_id && !is_dyn_trait { - return Some(()); - } + if *function_id == self.next_function_id? 
+ && let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id + { + let is_dyn_trait = self + .prev_receiver_ty + .as_ref() + .is_some_and(|it| it.strip_references().dyn_trait().is_some()); + if *receiver_expr_id == prev_filter_map_expr_id && !is_dyn_trait { + return Some(()); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index ca132fbdc454a..e803b56a1ed8f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -382,10 +382,10 @@ impl HirDisplay for Pat { let subpats = (0..num_fields).map(|i| { WriteWith(move |f| { let fid = LocalFieldId::from_raw((i as u32).into()); - if let Some(p) = subpatterns.get(i) { - if p.field == fid { - return p.pattern.hir_fmt(f); - } + if let Some(p) = subpatterns.get(i) + && p.field == fid + { + return p.pattern.hir_fmt(f); } if let Some(p) = subpatterns.iter().find(|p| p.field == fid) { p.pattern.hir_fmt(f) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index f6ad3c7aae2d8..827585e50693a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -272,10 +272,10 @@ impl<'db> UnsafeVisitor<'db> { if let Some(func) = callee.as_fn_def(self.db) { self.check_call(current, func); } - if let TyKind::Function(fn_ptr) = callee.kind(Interner) { - if fn_ptr.sig.safety == chalk_ir::Safety::Unsafe { - self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall); - } + if let TyKind::Function(fn_ptr) = callee.kind(Interner) + && fn_ptr.sig.safety == chalk_ir::Safety::Unsafe + { + self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall); } } Expr::Path(path) => { @@ -346,12 +346,11 @@ impl<'db> UnsafeVisitor<'db> { Expr::Cast { .. } => self.inside_assignment = inside_assignment, Expr::Field { .. } => { self.inside_assignment = inside_assignment; - if !inside_assignment { - if let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) = + if !inside_assignment + && let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) = self.infer.field_resolution(current) - { - self.on_unsafe_op(current.into(), UnsafetyReason::UnionField); - } + { + self.on_unsafe_op(current.into(), UnsafetyReason::UnionField); } } Expr::Unsafe { statements, .. 
} => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index f0e31ebd020ca..8f35a3c214551 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -608,48 +608,46 @@ impl HirDisplay for ProjectionTy { // if we are projection on a type parameter, check if the projection target has bounds // itself, if so, we render them directly as `impl Bound` instead of the less useful // `::Assoc` - if !f.display_kind.is_source_code() { - if let TyKind::Placeholder(idx) = self_ty.kind(Interner) { - if !f.bounds_formatting_ctx.contains(self) { - let db = f.db; - let id = from_placeholder_idx(db, *idx); - let generics = generics(db, id.parent); - - let substs = generics.placeholder_subst(db); - let bounds = db - .generic_predicates(id.parent) - .iter() - .map(|pred| pred.clone().substitute(Interner, &substs)) - .filter(|wc| { - let ty = match wc.skip_binders() { - WhereClause::Implemented(tr) => tr.self_type_parameter(Interner), - WhereClause::TypeOutlives(t) => t.ty.clone(), - // We shouldn't be here if these exist - WhereClause::AliasEq(_) | WhereClause::LifetimeOutlives(_) => { - return false; - } - }; - let TyKind::Alias(AliasTy::Projection(proj)) = ty.kind(Interner) else { - return false; - }; - proj == self - }) - .collect::>(); - if !bounds.is_empty() { - return f.format_bounds_with(self.clone(), |f| { - write_bounds_like_dyn_trait_with_prefix( - f, - "impl", - Either::Left( - &TyKind::Alias(AliasTy::Projection(self.clone())) - .intern(Interner), - ), - &bounds, - SizedByDefault::NotSized, - ) - }); - } - } + if !f.display_kind.is_source_code() + && let TyKind::Placeholder(idx) = self_ty.kind(Interner) + && !f.bounds_formatting_ctx.contains(self) + { + let db = f.db; + let id = from_placeholder_idx(db, *idx); + let generics = generics(db, id.parent); + + let substs = generics.placeholder_subst(db); + let bounds = db + .generic_predicates(id.parent) + .iter() + .map(|pred| pred.clone().substitute(Interner, &substs)) + .filter(|wc| { + let ty = match wc.skip_binders() { + WhereClause::Implemented(tr) => tr.self_type_parameter(Interner), + WhereClause::TypeOutlives(t) => t.ty.clone(), + // We shouldn't be here if these exist + WhereClause::AliasEq(_) | WhereClause::LifetimeOutlives(_) => { + return false; + } + }; + let TyKind::Alias(AliasTy::Projection(proj)) = ty.kind(Interner) else { + return false; + }; + proj == self + }) + .collect::>(); + if !bounds.is_empty() { + return f.format_bounds_with(self.clone(), |f| { + write_bounds_like_dyn_trait_with_prefix( + f, + "impl", + Either::Left( + &TyKind::Alias(AliasTy::Projection(self.clone())).intern(Interner), + ), + &bounds, + SizedByDefault::NotSized, + ) + }); } } @@ -1860,18 +1858,13 @@ fn write_bounds_like_dyn_trait( write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; f.end_location_link(); if is_fn_trait { - if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) { - if let Some(args) = + if let [self_, params @ ..] 
= trait_ref.substitution.as_slice(Interner) + && let Some(args) = params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple()) - { - write!(f, "(")?; - hir_fmt_generic_arguments( - f, - args.as_slice(Interner), - self_.ty(Interner), - )?; - write!(f, ")")?; - } + { + write!(f, "(")?; + hir_fmt_generic_arguments(f, args.as_slice(Interner), self_.ty(Interner))?; + write!(f, ")")?; } } else { let params = generic_args_sans_defaults( @@ -1879,13 +1872,13 @@ fn write_bounds_like_dyn_trait( Some(trait_.into()), trait_ref.substitution.as_slice(Interner), ); - if let [self_, params @ ..] = params { - if !params.is_empty() { - write!(f, "<")?; - hir_fmt_generic_arguments(f, params, self_.ty(Interner))?; - // there might be assoc type bindings, so we leave the angle brackets open - angle_open = true; - } + if let [self_, params @ ..] = params + && !params.is_empty() + { + write!(f, "<")?; + hir_fmt_generic_arguments(f, params, self_.ty(Interner))?; + // there might be assoc type bindings, so we leave the angle brackets open + angle_open = true; } } } @@ -2443,11 +2436,11 @@ impl HirDisplayWithExpressionStore for Path { generic_args.args[0].hir_fmt(f, store)?; } } - if let Some(ret) = generic_args.bindings[0].type_ref { - if !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty()) { - write!(f, " -> ")?; - ret.hir_fmt(f, store)?; - } + if let Some(ret) = generic_args.bindings[0].type_ref + && !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty()) + { + write!(f, " -> ")?; + ret.hir_fmt(f, store)?; } } hir_def::expr_store::path::GenericArgsParentheses::No => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index 30949c83bfae1..6294d683e6c02 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -136,16 +136,15 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b let predicates = predicates.iter().map(|p| p.skip_binders().skip_binders().clone()); elaborate_clause_supertraits(db, predicates).any(|pred| match pred { WhereClause::Implemented(trait_ref) => { - if from_chalk_trait_id(trait_ref.trait_id) == sized { - if let TyKind::BoundVar(it) = + if from_chalk_trait_id(trait_ref.trait_id) == sized + && let TyKind::BoundVar(it) = *trait_ref.self_type_parameter(Interner).kind(Interner) - { - // Since `generic_predicates` is `Binder>`, the `DebrujinIndex` of - // self-parameter is `1` - return it - .index_if_bound_at(DebruijnIndex::ONE) - .is_some_and(|idx| idx == trait_self_param_idx); - } + { + // Since `generic_predicates` is `Binder>`, the `DebrujinIndex` of + // self-parameter is `1` + return it + .index_if_bound_at(DebruijnIndex::ONE) + .is_some_and(|idx| idx == trait_self_param_idx); } false } @@ -401,10 +400,10 @@ where cb(MethodViolationCode::ReferencesSelfOutput)?; } - if !func_data.is_async() { - if let Some(mvc) = contains_illegal_impl_trait_in_trait(db, &sig) { - cb(mvc)?; - } + if !func_data.is_async() + && let Some(mvc) = contains_illegal_impl_trait_in_trait(db, &sig) + { + cb(mvc)?; } let generic_params = db.generic_params(func.into()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 7c39afa0ef896..86345b23364d3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -902,12 +902,12 @@ impl<'db> InferenceContext<'db> { return 
false; } - if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic { - if let Some(ty) = field_with_same_name { - *ty = table.resolve_completely(ty.clone()); - if ty.contains_unknown() { - *field_with_same_name = None; - } + if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic + && let Some(ty) = field_with_same_name + { + *ty = table.resolve_completely(ty.clone()); + if ty.contains_unknown() { + *field_with_same_name = None; } } } @@ -1010,12 +1010,12 @@ impl<'db> InferenceContext<'db> { param_tys.push(va_list_ty); } let mut param_tys = param_tys.into_iter().chain(iter::repeat(self.table.new_type_var())); - if let Some(self_param) = self.body.self_param { - if let Some(ty) = param_tys.next() { - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); - self.write_binding_ty(self_param, ty); - } + if let Some(self_param) = self.body.self_param + && let Some(ty) = param_tys.next() + { + let ty = self.insert_type_vars(ty); + let ty = self.normalize_associated_types_in(ty); + self.write_binding_ty(self_param, ty); } let mut tait_candidates = FxHashSet::default(); for (ty, pat) in param_tys.zip(&*self.body.params) { @@ -1199,20 +1199,19 @@ impl<'db> InferenceContext<'db> { ) -> std::ops::ControlFlow { let ty = self.table.resolve_ty_shallow(ty); - if let TyKind::OpaqueType(id, _) = ty.kind(Interner) { - if let ImplTraitId::TypeAliasImplTrait(alias_id, _) = + if let TyKind::OpaqueType(id, _) = ty.kind(Interner) + && let ImplTraitId::TypeAliasImplTrait(alias_id, _) = self.db.lookup_intern_impl_trait_id((*id).into()) - { - let loc = self.db.lookup_intern_type_alias(alias_id); - match loc.container { - ItemContainerId::ImplId(impl_id) => { - self.assocs.insert(*id, (impl_id, ty.clone())); - } - ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) => { - self.non_assocs.insert(*id, ty.clone()); - } - _ => {} + { + let loc = self.db.lookup_intern_type_alias(alias_id); + match loc.container { + ItemContainerId::ImplId(impl_id) => { + self.assocs.insert(*id, (impl_id, ty.clone())); + } + ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) => { + self.non_assocs.insert(*id, ty.clone()); } + _ => {} } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs index 4e95eca3f9402..f0a4167f8e250 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs @@ -233,26 +233,25 @@ impl CastCheck { F: FnMut(ExprId, Vec), { // Mutability order is opposite to rustc. `Mut < Not` - if m_expr <= m_cast { - if let TyKind::Array(ety, _) = t_expr.kind(Interner) { - // Coerce to a raw pointer so that we generate RawPtr in MIR. - let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner); - if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) - { - apply_adjustments(self.source_expr, adj); - } else { - never!( - "could not cast from reference to array to pointer to array ({:?} to {:?})", - self.expr_ty, - array_ptr_type - ); - } + if m_expr <= m_cast + && let TyKind::Array(ety, _) = t_expr.kind(Interner) + { + // Coerce to a raw pointer so that we generate RawPtr in MIR. 
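The recurring shape in these hunks is purely syntactic: a nested `if let` ladder collapses into a single let-chain so the body sits one level deep and the logic is untouched. A minimal sketch of the before/after, using invented names rather than anything from the codebase (let-chains are tied to the 2024 edition at the time of writing):

    // Illustrative only: `lookup` and its return type are invented for this sketch.
    fn lookup(key: u32) -> Option<(u32, bool)> {
        (key % 2 == 0).then_some((key * 10, true))
    }

    // Before: every extra condition adds a level of nesting.
    fn before(key: u32) -> Option<u32> {
        if let Some((value, enabled)) = lookup(key) {
            if enabled {
                if let Some(doubled) = value.checked_mul(2) {
                    return Some(doubled);
                }
            }
        }
        None
    }

    // After: the same logic as one let-chain.
    fn after(key: u32) -> Option<u32> {
        if let Some((value, enabled)) = lookup(key)
            && enabled
            && let Some(doubled) = value.checked_mul(2)
        {
            return Some(doubled);
        }
        None
    }

    fn main() {
        assert_eq!(before(4), after(4));
        assert_eq!(before(3), after(3));
    }

The two functions behave identically, which is why the hunks in this patch are indentation-heavy but logic-neutral.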
+ let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner); + if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) { + apply_adjustments(self.source_expr, adj); + } else { + never!( + "could not cast from reference to array to pointer to array ({:?} to {:?})", + self.expr_ty, + array_ptr_type + ); + } - // This is a less strict condition than rustc's `demand_eqtype`, - // but false negative is better than false positive - if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() { - return Ok(()); - } + // This is a less strict condition than rustc's `demand_eqtype`, + // but false negative is better than false positive + if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() { + return Ok(()); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index c3029bf2b59ad..8024c1a9a4e92 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -176,12 +176,12 @@ impl InferenceContext<'_> { } // Deduction based on the expected `dyn Fn` is done separately. - if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) { - if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) { - let expected_sig_ty = TyKind::Function(sig).intern(Interner); + if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) + && let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) + { + let expected_sig_ty = TyKind::Function(sig).intern(Interner); - self.unify(sig_ty, &expected_sig_ty); - } + self.unify(sig_ty, &expected_sig_ty); } } @@ -208,14 +208,13 @@ impl InferenceContext<'_> { alias: AliasTy::Projection(projection_ty), ty: projected_ty, }) = bound.skip_binders() - { - if let Some(sig) = self.deduce_sig_from_projection( + && let Some(sig) = self.deduce_sig_from_projection( closure_kind, projection_ty, projected_ty, - ) { - return Some(sig); - } + ) + { + return Some(sig); } None }); @@ -254,55 +253,44 @@ impl InferenceContext<'_> { let mut expected_kind = None; for clause in elaborate_clause_supertraits(self.db, clauses.rev()) { - if expected_sig.is_none() { - if let WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(projection), - ty, - }) = &clause - { - let inferred_sig = - self.deduce_sig_from_projection(closure_kind, projection, ty); - // Make sure that we didn't infer a signature that mentions itself. - // This can happen when we elaborate certain supertrait bounds that - // mention projections containing the `Self` type. See rust-lang/rust#105401. - struct MentionsTy<'a> { - expected_ty: &'a Ty, - } - impl TypeVisitor for MentionsTy<'_> { - type BreakTy = (); + if expected_sig.is_none() + && let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) = + &clause + { + let inferred_sig = self.deduce_sig_from_projection(closure_kind, projection, ty); + // Make sure that we didn't infer a signature that mentions itself. + // This can happen when we elaborate certain supertrait bounds that + // mention projections containing the `Self` type. See rust-lang/rust#105401. 
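Several of the larger chains in this patch (for example the TAIT coercion check in coerce.rs further down) interleave plain boolean guards with `let` bindings. Each binding is visible to every condition after it and to the body, which is what lets deeply nested lookups flatten into one condition list. A small self-contained sketch with invented names:

    use std::collections::HashMap;

    // Invented stand-in for a context holding an optional lookup table.
    struct Ctx {
        table: Option<HashMap<u32, String>>,
    }

    fn describe(ctx: &Ctx, id: Option<u32>) -> Option<&str> {
        // Booleans and `let` bindings mix freely; `table` and `key` are in
        // scope for the later conditions and for the body.
        if let Some(table) = &ctx.table
            && let Some(key) = id
            && key != 0
            && let Some(name) = table.get(&key)
        {
            return Some(name.as_str());
        }
        None
    }

    fn main() {
        let mut m = HashMap::new();
        m.insert(7, "seven".to_string());
        let ctx = Ctx { table: Some(m) };
        assert_eq!(describe(&ctx, Some(7)), Some("seven"));
        assert_eq!(describe(&ctx, Some(1)), None);
        assert_eq!(describe(&Ctx { table: None }, Some(7)), None);
    }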
+ struct MentionsTy<'a> { + expected_ty: &'a Ty, + } + impl TypeVisitor for MentionsTy<'_> { + type BreakTy = (); - fn interner(&self) -> Interner { - Interner - } + fn interner(&self) -> Interner { + Interner + } - fn as_dyn( - &mut self, - ) -> &mut dyn TypeVisitor - { - self - } + fn as_dyn( + &mut self, + ) -> &mut dyn TypeVisitor + { + self + } - fn visit_ty( - &mut self, - t: &Ty, - db: chalk_ir::DebruijnIndex, - ) -> ControlFlow<()> { - if t == self.expected_ty { - ControlFlow::Break(()) - } else { - t.super_visit_with(self, db) - } + fn visit_ty(&mut self, t: &Ty, db: chalk_ir::DebruijnIndex) -> ControlFlow<()> { + if t == self.expected_ty { + ControlFlow::Break(()) + } else { + t.super_visit_with(self, db) } } - if inferred_sig - .visit_with( - &mut MentionsTy { expected_ty }, - chalk_ir::DebruijnIndex::INNERMOST, - ) - .is_continue() - { - expected_sig = inferred_sig; - } + } + if inferred_sig + .visit_with(&mut MentionsTy { expected_ty }, chalk_ir::DebruijnIndex::INNERMOST) + .is_continue() + { + expected_sig = inferred_sig; } } @@ -617,11 +605,10 @@ impl HirPlace { if let CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow, }) = current_capture + && self.projections[len..].contains(&ProjectionElem::Deref) { - if self.projections[len..].contains(&ProjectionElem::Deref) { - current_capture = - CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }); - } + current_capture = + CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }); } current_capture } @@ -1076,12 +1063,11 @@ impl InferenceContext<'_> { Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), }; - if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) { - if let Some(place) = + if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) + && let Some(place) = apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest) - { - self.add_capture(place, capture_kind); - } + { + self.add_capture(place, capture_kind); } self.walk_expr_with_adjust(tgt_expr, rest); } @@ -1169,15 +1155,15 @@ impl InferenceContext<'_> { } } self.walk_expr(*expr); - if let Some(discr_place) = self.place_of_expr(*expr) { - if self.is_upvar(&discr_place) { - let mut capture_mode = None; - for arm in arms.iter() { - self.walk_pat(&mut capture_mode, arm.pat); - } - if let Some(c) = capture_mode { - self.push_capture(discr_place, c); - } + if let Some(discr_place) = self.place_of_expr(*expr) + && self.is_upvar(&discr_place) + { + let mut capture_mode = None; + for arm in arms.iter() { + self.walk_pat(&mut capture_mode, arm.pat); + } + if let Some(c) = capture_mode { + self.push_capture(discr_place, c); } } } @@ -1209,13 +1195,11 @@ impl InferenceContext<'_> { let mutability = 'b: { if let Some(deref_trait) = self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) - { - if let Some(deref_fn) = deref_trait + && let Some(deref_fn) = deref_trait .trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - break 'b deref_fn == f; - } + { + break 'b deref_fn == f; } false }; @@ -1405,10 +1389,10 @@ impl InferenceContext<'_> { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { let mut ty = None; - if let Some(it) = self.result.expr_adjustments.get(&e) { - if let Some(it) = it.last() { - ty = Some(it.target.clone()); - } + if let Some(it) = self.result.expr_adjustments.get(&e) + && let Some(it) = it.last() + { + 
ty = Some(it.target.clone()); } ty.unwrap_or_else(|| self.expr_ty(e)) } @@ -1793,10 +1777,10 @@ impl InferenceContext<'_> { } pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) { - if let Some(c) = self.current_closure { - if !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) { - self.closure_dependencies.entry(c).or_default().push(dep); - } + if let Some(c) = self.current_closure + && !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) + { + self.closure_dependencies.entry(c).or_default().push(dep); } fn dep_creates_cycle( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 39bd90849fe8f..761a2564aa799 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -164,14 +164,14 @@ impl CoerceMany { // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335) // First try to coerce the new expression to the type of the previous ones, // but only if the new expression has no coercion already applied to it. - if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) { - if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) { - self.final_ty = Some(res); - if let Some(expr) = expr { - self.expressions.push(expr); - } - return; + if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) + && let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) + { + self.final_ty = Some(res); + if let Some(expr) = expr { + self.expressions.push(expr); } + return; } if let Ok((adjustments, res)) = @@ -322,18 +322,13 @@ impl InferenceTable<'_> { // If we are coercing into a TAIT, coerce into its proxy inference var, instead. let mut to_ty = to_ty; let _to; - if let Some(tait_table) = &self.tait_coercion_table { - if let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) { - if !matches!( - from_ty.kind(Interner), - TyKind::InferenceVar(..) | TyKind::OpaqueType(..) - ) { - if let Some(ty) = tait_table.get(opaque_ty_id) { - _to = ty.clone(); - to_ty = &_to; - } - } - } + if let Some(tait_table) = &self.tait_coercion_table + && let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) + && !matches!(from_ty.kind(Interner), TyKind::InferenceVar(..) 
| TyKind::OpaqueType(..)) + && let Some(ty) = tait_table.get(opaque_ty_id) + { + _to = ty.clone(); + to_ty = &_to; } // Consider coercing the subtype to a DST @@ -594,14 +589,13 @@ impl InferenceTable<'_> { F: FnOnce(Ty) -> Vec, G: FnOnce(Ty) -> Vec, { - if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) { - if let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) = + if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) + && let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) = (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety) - { - let from_unsafe = - TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner); - return self.unify_and(&from_unsafe, to_ty, to_unsafe); - } + { + let from_unsafe = + TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner); + return self.unify_and(&from_unsafe, to_ty, to_unsafe); } self.unify_and(&from_ty, to_ty, normal) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index d43c99fc28271..16fc2bfc0631f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -653,19 +653,18 @@ impl InferenceContext<'_> { // FIXME: Note down method resolution her match op { UnaryOp::Deref => { - if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) { - if let Some(deref_fn) = deref_trait + if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) + && let Some(deref_fn) = deref_trait .trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::deref)) - { - // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that - // the mutability is not wrong, and will be fixed in `self.infer_mut`). - self.write_method_resolution( - tgt_expr, - deref_fn, - Substitution::empty(Interner), - ); - } + { + // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that + // the mutability is not wrong, and will be fixed in `self.infer_mut`). + self.write_method_resolution( + tgt_expr, + deref_fn, + Substitution::empty(Interner), + ); } if let Some(derefed) = builtin_deref(self.table.db, &inner_ty, true) { self.resolve_ty_shallow(derefed) @@ -1387,28 +1386,28 @@ impl InferenceContext<'_> { let ret_ty = match method_ty.callable_sig(self.db) { Some(sig) => { let p_left = &sig.params()[0]; - if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) { - if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) { - self.write_expr_adj( - lhs, - Box::new([Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), - target: p_left.clone(), - }]), - ); - } + if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. 
}) + && let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) + { + self.write_expr_adj( + lhs, + Box::new([Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), + target: p_left.clone(), + }]), + ); } let p_right = &sig.params()[1]; - if matches!(op, BinaryOp::CmpOp(..)) { - if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) { - self.write_expr_adj( - rhs, - Box::new([Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), - target: p_right.clone(), - }]), - ); - } + if matches!(op, BinaryOp::CmpOp(..)) + && let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) + { + self.write_expr_adj( + rhs, + Box::new([Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), + target: p_right.clone(), + }]), + ); } sig.ret().clone() } @@ -1664,14 +1663,12 @@ impl InferenceContext<'_> { Some((ty, field_id, adjustments, is_public)) => { self.write_expr_adj(receiver, adjustments.into_boxed_slice()); self.result.field_resolutions.insert(tgt_expr, field_id); - if !is_public { - if let Either::Left(field) = field_id { - // FIXME: Merge this diagnostic into UnresolvedField? - self.push_diagnostic(InferenceDiagnostic::PrivateField { - expr: tgt_expr, - field, - }); - } + if !is_public && let Either::Left(field) = field_id { + // FIXME: Merge this diagnostic into UnresolvedField? + self.push_diagnostic(InferenceDiagnostic::PrivateField { + expr: tgt_expr, + field, + }); } ty } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 3f7eba9dd18c3..c798e9e050a18 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -124,53 +124,41 @@ impl InferenceContext<'_> { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } &Expr::Index { base, index } => { - if mutability == Mutability::Mut { - if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { - if let Some(index_trait) = - LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate) - { - if let Some(index_fn) = index_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::index_mut)) - { - *f = index_fn; - let mut base_ty = None; - let base_adjustments = self - .result - .expr_adjustments - .get_mut(&base) - .and_then(|it| it.last_mut()); - if let Some(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)), - target, - }) = base_adjustments - { - if let TyKind::Ref(_, _, ty) = target.kind(Interner) { - base_ty = Some(ty.clone()); - } - *mutability = Mutability::Mut; - } - - // Apply `IndexMut` obligation for non-assignee expr - if let Some(base_ty) = base_ty { - let index_ty = - if let Some(ty) = self.result.type_of_expr.get(index) { - ty.clone() - } else { - self.infer_expr( - index, - &Expectation::none(), - ExprIsRead::Yes, - ) - }; - let trait_ref = TyBuilder::trait_ref(self.db, index_trait) - .push(base_ty) - .fill(|_| index_ty.clone().cast(Interner)) - .build(); - self.push_obligation(trait_ref.cast(Interner)); - } - } + if mutability == Mutability::Mut + && let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) + && let Some(index_trait) = + LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate) + && let Some(index_fn) = index_trait + .trait_items(self.db) + .method_by_name(&Name::new_symbol_root(sym::index_mut)) + { + *f = index_fn; + let mut base_ty = None; + let base_adjustments = + 
self.result.expr_adjustments.get_mut(&base).and_then(|it| it.last_mut()); + if let Some(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)), + target, + }) = base_adjustments + { + if let TyKind::Ref(_, _, ty) = target.kind(Interner) { + base_ty = Some(ty.clone()); } + *mutability = Mutability::Mut; + } + + // Apply `IndexMut` obligation for non-assignee expr + if let Some(base_ty) = base_ty { + let index_ty = if let Some(ty) = self.result.type_of_expr.get(index) { + ty.clone() + } else { + self.infer_expr(index, &Expectation::none(), ExprIsRead::Yes) + }; + let trait_ref = TyBuilder::trait_ref(self.db, index_trait) + .push(base_ty) + .fill(|_| index_ty.clone().cast(Interner)) + .build(); + self.push_obligation(trait_ref.cast(Interner)); } } self.infer_mut_expr(base, mutability); @@ -178,28 +166,23 @@ impl InferenceContext<'_> { } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { let mut mutability = mutability; - if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { - if mutability == Mutability::Mut { - if let Some(deref_trait) = - LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate) - { - let ty = self.result.type_of_expr.get(*expr); - let is_mut_ptr = ty.is_some_and(|ty| { - let ty = self.table.resolve_ty_shallow(ty); - matches!( - ty.kind(Interner), - chalk_ir::TyKind::Raw(Mutability::Mut, _) - ) - }); - if is_mut_ptr { - mutability = Mutability::Not; - } else if let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - *f = deref_fn; - } - } + if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) + && mutability == Mutability::Mut + && let Some(deref_trait) = + LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate) + { + let ty = self.result.type_of_expr.get(*expr); + let is_mut_ptr = ty.is_some_and(|ty| { + let ty = self.table.resolve_ty_shallow(ty); + matches!(ty.kind(Interner), chalk_ir::TyKind::Raw(Mutability::Mut, _)) + }); + if is_mut_ptr { + mutability = Mutability::Not; + } else if let Some(deref_fn) = deref_trait + .trait_items(self.db) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) + { + *f = deref_fn; } } self.infer_mut_expr(*expr, mutability); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 18288b718f76d..707bec0fce4ce 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -498,12 +498,12 @@ impl InferenceContext<'_> { // If `expected` is an infer ty, we try to equate it to an array if the given pattern // allows it. See issue #16609 - if self.pat_is_irrefutable(decl) && expected.is_ty_var() { - if let Some(resolved_array_ty) = + if self.pat_is_irrefutable(decl) + && expected.is_ty_var() + && let Some(resolved_array_ty) = self.try_resolve_slice_ty_to_array_ty(prefix, suffix, slice) - { - self.unify(&expected, &resolved_array_ty); - } + { + self.unify(&expected, &resolved_array_ty); } let expected = self.resolve_ty_shallow(&expected); @@ -539,17 +539,16 @@ impl InferenceContext<'_> { fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty { // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`. 
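One limit worth keeping in mind while reviewing: a let-chain has a single failure path, so only the prefix of conditions that share one fallthrough gets flattened. An inner `if let ... else` that needs its own else branch stays nested inside the chain body, which is why hunks like the cast.rs and mutability.rs ones above still contain a nested `if`. A hedged sketch with invented names:

    // Invented parser-ish example showing where chaining stops.
    fn parse_flag(raw: &str) -> Option<bool> {
        raw.parse().ok()
    }

    fn apply(enabled: bool, raw: Option<&str>) -> &'static str {
        // The outer checks share one fallthrough, so they chain.
        if enabled
            && let Some(value) = raw
        {
            // This level needs its own else, so it stays a nested if/else.
            if let Some(flag) = parse_flag(value) {
                if flag { "on" } else { "off" }
            } else {
                "unparseable"
            }
        } else {
            "disabled or missing"
        }
    }

    fn main() {
        assert_eq!(apply(true, Some("true")), "on");
        assert_eq!(apply(true, Some("nope")), "unparseable");
        assert_eq!(apply(false, Some("true")), "disabled or missing");
        assert_eq!(apply(true, None), "disabled or missing");
    }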
- if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] { - if let Some((inner, ..)) = expected.as_reference() { - let inner = self.resolve_ty_shallow(inner); - if matches!(inner.kind(Interner), TyKind::Slice(_)) { - let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner); - let slice_ty = TyKind::Slice(elem_ty).intern(Interner); - let ty = - TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner); - self.write_expr_ty(expr, ty.clone()); - return ty; - } + if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] + && let Some((inner, ..)) = expected.as_reference() + { + let inner = self.resolve_ty_shallow(inner); + if matches!(inner.kind(Interner), TyKind::Slice(_)) { + let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner); + let slice_ty = TyKind::Slice(elem_ty).intern(Interner); + let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner); + self.write_expr_ty(expr, ty.clone()); + return ty; } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index d61e7de6672f1..afee9606bd5f8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -830,10 +830,10 @@ fn named_associated_type_shorthand_candidates( let data = t.hir_trait_id().trait_items(db); for (name, assoc_id) in &data.items { - if let AssocItemId::TypeAliasId(alias) = assoc_id { - if let Some(result) = cb(name, &t, *alias) { - return Some(result); - } + if let AssocItemId::TypeAliasId(alias) = assoc_id + && let Some(result) = cb(name, &t, *alias) + { + return Some(result); } } None diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index 5c06234fa077f..9519c38eeddfd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -360,15 +360,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } } - if let Some(enum_segment) = enum_segment { - if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }); - } + if let Some(enum_segment) = enum_segment + && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); } self.handle_type_ns_resolution(&resolution); @@ -417,15 +416,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } } - if let Some(enum_segment) = enum_segment { - if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }); - } + if let Some(enum_segment) = enum_segment + && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + 
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); } match &res { @@ -576,13 +574,12 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // This simplifies the code a bit. let penultimate_idx = self.current_segment_idx.wrapping_sub(1); let penultimate = self.segments.get(penultimate_idx); - if let Some(penultimate) = penultimate { - if self.current_or_prev_segment.args_and_bindings.is_none() - && penultimate.args_and_bindings.is_some() - { - self.current_segment_idx = penultimate_idx; - self.current_or_prev_segment = penultimate; - } + if let Some(penultimate) = penultimate + && self.current_or_prev_segment.args_and_bindings.is_none() + && penultimate.args_and_bindings.is_some() + { + self.current_segment_idx = penultimate_idx; + self.current_or_prev_segment = penultimate; } var.lookup(self.ctx.db).parent.into() } @@ -607,37 +604,36 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { ) -> Substitution { let mut lifetime_elision = self.ctx.lifetime_elision.clone(); - if let Some(args) = self.current_or_prev_segment.args_and_bindings { - if args.parenthesized != GenericArgsParentheses::No { - let prohibit_parens = match def { - GenericDefId::TraitId(trait_) => { - // RTN is prohibited anyways if we got here. - let is_rtn = - args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; - let is_fn_trait = self - .ctx - .db - .trait_signature(trait_) - .flags - .contains(TraitFlags::RUSTC_PAREN_SUGAR); - is_rtn || !is_fn_trait - } - _ => true, - }; - - if prohibit_parens { - let segment = self.current_segment_u32(); - self.on_diagnostic( - PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, - ); - - return TyBuilder::unknown_subst(self.ctx.db, def); + if let Some(args) = self.current_or_prev_segment.args_and_bindings + && args.parenthesized != GenericArgsParentheses::No + { + let prohibit_parens = match def { + GenericDefId::TraitId(trait_) => { + // RTN is prohibited anyways if we got here. + let is_rtn = args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; + let is_fn_trait = self + .ctx + .db + .trait_signature(trait_) + .flags + .contains(TraitFlags::RUSTC_PAREN_SUGAR); + is_rtn || !is_fn_trait } + _ => true, + }; - // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. - lifetime_elision = - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; + if prohibit_parens { + let segment = self.current_segment_u32(); + self.on_diagnostic( + PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, + ); + + return TyBuilder::unknown_subst(self.ctx.db, def); } + + // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. 
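The hunks that touch labeled blocks (`break 'b ...` in coerce.rs, as_place.rs, and the MIR evaluator) combine a let-chain with a block that yields a value on early exit. A standalone sketch of that shape, with invented names standing in for the lang-item lookups:

    // Invented registry; `resolve_deref_mut` stands in for a lang-item lookup.
    struct Registry {
        deref_mut_fn: Option<u32>,
    }

    impl Registry {
        fn resolve_deref_mut(&self) -> Option<u32> {
            self.deref_mut_fn
        }
    }

    fn is_deref_mut_call(reg: &Registry, callee: Option<u32>) -> bool {
        // The labeled block yields true/false; the let-chain replaces two
        // nested `if let`s, and `break 'b` still produces the block's value.
        let result = 'b: {
            if let Some(f) = callee
                && let Some(deref_fn) = reg.resolve_deref_mut()
            {
                break 'b deref_fn == f;
            }
            false
        };
        result
    }

    fn main() {
        let reg = Registry { deref_mut_fn: Some(3) };
        assert!(is_deref_mut_call(&reg, Some(3)));
        assert!(!is_deref_mut_call(&reg, Some(4)));
        assert!(!is_deref_mut_call(&reg, None));
    }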
+ lifetime_elision = + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; } self.substs_from_args_and_bindings( @@ -753,18 +749,20 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { match param { GenericParamDataRef::LifetimeParamData(_) => error_lifetime().cast(Interner), GenericParamDataRef::TypeParamData(param) => { - if !infer_args && param.default.is_some() { - if let Some(default) = default() { - return default; - } + if !infer_args + && param.default.is_some() + && let Some(default) = default() + { + return default; } TyKind::Error.intern(Interner).cast(Interner) } GenericParamDataRef::ConstParamData(param) => { - if !infer_args && param.default.is_some() { - if let Some(default) = default() { - return default; - } + if !infer_args + && param.default.is_some() + && let Some(default) = default() + { + return default; } let GenericParamId::ConstParamId(const_id) = param_id else { unreachable!("non-const param ID for const param"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index a6150a9bc1728..b22781e947013 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -581,15 +581,15 @@ impl ReceiverAdjustments { } if self.unsize_array { ty = 'it: { - if let TyKind::Ref(m, l, inner) = ty.kind(Interner) { - if let TyKind::Array(inner, _) = inner.kind(Interner) { - break 'it TyKind::Ref( - *m, - l.clone(), - TyKind::Slice(inner.clone()).intern(Interner), - ) - .intern(Interner); - } + if let TyKind::Ref(m, l, inner) = ty.kind(Interner) + && let TyKind::Array(inner, _) = inner.kind(Interner) + { + break 'it TyKind::Ref( + *m, + l.clone(), + TyKind::Slice(inner.clone()).intern(Interner), + ) + .intern(Interner); } // FIXME: report diagnostic if array unsizing happens without indirection. 
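In the generic-parameter default hunks just above, the chain is ordered so the cheap flag checks run before the `default()` closure; `&&` short-circuits in a chain exactly as it does between plain booleans. A sketch under that assumption, with invented names:

    // Invented helper: `default` is only invoked if the earlier checks pass.
    fn pick<T>(
        infer_args: bool,
        has_default: bool,
        default: impl FnOnce() -> Option<T>,
        fallback: T,
    ) -> T {
        if !infer_args
            && has_default
            && let Some(value) = default()
        {
            return value;
        }
        fallback
    }

    fn main() {
        assert_eq!(pick(false, true, || Some(7), 0), 7);

        // Inference was requested: the default closure must not run.
        let mut called = false;
        assert_eq!(pick(true, true, || { called = true; Some(7) }, 0), 0);
        assert!(!called);

        assert_eq!(pick(false, false, || None::<i32>, 0), 0);
    }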
ty @@ -1549,11 +1549,11 @@ fn is_valid_impl_method_candidate( check_that!(receiver_ty.is_none()); check_that!(name.is_none_or(|n| n == item_name)); - if let Some(from_module) = visible_from_module { - if !db.assoc_visibility(c.into()).is_visible_from(db, from_module) { - cov_mark::hit!(const_candidate_not_visible); - return IsValidCandidate::NotVisible; - } + if let Some(from_module) = visible_from_module + && !db.assoc_visibility(c.into()).is_visible_from(db, from_module) + { + cov_mark::hit!(const_candidate_not_visible); + return IsValidCandidate::NotVisible; } let self_ty_matches = table.run_in_snapshot(|table| { let expected_self_ty = @@ -1638,11 +1638,11 @@ fn is_valid_impl_fn_candidate( let db = table.db; let data = db.function_signature(fn_id); - if let Some(from_module) = visible_from_module { - if !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module) { - cov_mark::hit!(autoderef_candidate_not_visible); - return IsValidCandidate::NotVisible; - } + if let Some(from_module) = visible_from_module + && !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module) + { + cov_mark::hit!(autoderef_candidate_not_visible); + return IsValidCandidate::NotVisible; } table.run_in_snapshot(|table| { let _p = tracing::info_span!("subst_for_def").entered(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index fb0c0dee095f1..52df851c30d13 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -559,10 +559,9 @@ fn mutability_of_locals( }, p, ) = value + && place_case(db, body, p) != ProjectionCase::Indirect { - if place_case(db, body, p) != ProjectionCase::Indirect { - push_mut_span(p.local, statement.span, &mut result); - } + push_mut_span(p.local, statement.span, &mut result); } } StatementKind::FakeRead(p) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 9a97bd6dbe293..dfb8ae704b996 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -1082,18 +1082,18 @@ impl Evaluator<'_> { let stack_size = { let mut stack_ptr = self.stack.len(); for (id, it) in body.locals.iter() { - if id == return_slot() { - if let Some(destination) = destination { - locals.ptr.insert(id, destination); - continue; - } + if id == return_slot() + && let Some(destination) = destination + { + locals.ptr.insert(id, destination); + continue; } let (size, align) = self.size_align_of_sized( &it.ty, &locals, "no unsized local in extending stack", )?; - while stack_ptr % align != 0 { + while !stack_ptr.is_multiple_of(align) { stack_ptr += 1; } let my_ptr = stack_ptr; @@ -1673,14 +1673,14 @@ impl Evaluator<'_> { if let Some(it) = goal(kind) { return Ok(it); } - if let TyKind::Adt(id, subst) = kind { - if let AdtId::StructId(struct_id) = id.0 { - let field_types = self.db.field_types(struct_id.into()); - if let Some(ty) = - field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst)) - { - return self.coerce_unsized_look_through_fields(&ty, goal); - } + if let TyKind::Adt(id, subst) = kind + && let AdtId::StructId(struct_id) = id.0 + { + let field_types = self.db.field_types(struct_id.into()); + if let Some(ty) = + field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst)) + { + return self.coerce_unsized_look_through_fields(&ty, goal); } } 
Err(MirEvalError::CoerceUnsizedError(ty.clone())) @@ -1778,17 +1778,15 @@ impl Evaluator<'_> { locals: &Locals, ) -> Result<(usize, Arc, Option<(usize, usize, i128)>)> { let adt = it.adt_id(self.db); - if let DefWithBodyId::VariantId(f) = locals.body.owner { - if let VariantId::EnumVariantId(it) = it { - if let AdtId::EnumId(e) = adt { - if f.lookup(self.db).parent == e { - // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and - // infinite sized type errors) we use a dummy layout - let i = self.const_eval_discriminant(it)?; - return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); - } - } - } + if let DefWithBodyId::VariantId(f) = locals.body.owner + && let VariantId::EnumVariantId(it) = it + && let AdtId::EnumId(e) = adt + && f.lookup(self.db).parent == e + { + // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and + // infinite sized type errors) we use a dummy layout + let i = self.const_eval_discriminant(it)?; + return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); } let layout = self.layout_adt(adt, subst)?; Ok(match &layout.variants { @@ -1909,10 +1907,10 @@ impl Evaluator<'_> { let name = const_id.name(self.db); MirEvalError::ConstEvalError(name, Box::new(e)) })?; - if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value { - if let ConstScalar::Bytes(v, mm) = &c.interned { - break 'b (v, mm); - } + if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value + && let ConstScalar::Bytes(v, mm) = &c.interned + { + break 'b (v, mm); } not_supported!("unevaluatable constant"); } @@ -2055,14 +2053,13 @@ impl Evaluator<'_> { .is_sized() .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize))); } - if let DefWithBodyId::VariantId(f) = locals.body.owner { - if let Some((AdtId::EnumId(e), _)) = ty.as_adt() { - if f.lookup(self.db).parent == e { - // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and - // infinite sized type errors) we use a dummy size - return Ok(Some((16, 16))); - } - } + if let DefWithBodyId::VariantId(f) = locals.body.owner + && let Some((AdtId::EnumId(e), _)) = ty.as_adt() + && f.lookup(self.db).parent == e + { + // Computing the exact size of enums require resolving the enum discriminants. 
In order to prevent loops (and + // infinite sized type errors) we use a dummy size + return Ok(Some((16, 16))); } let layout = self.layout(ty); if self.assert_placeholder_ty_is_unused @@ -2103,7 +2100,7 @@ impl Evaluator<'_> { if !align.is_power_of_two() || align > 10000 { return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid"))); } - while self.heap.len() % align != 0 { + while !self.heap.len().is_multiple_of(align) { self.heap.push(0); } if size.checked_add(self.heap.len()).is_none_or(|x| x > self.memory_limit) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index e9665d5ae9cf1..bb4c963a8ae15 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -119,25 +119,25 @@ impl Evaluator<'_> { destination.write_from_bytes(self, &result)?; return Ok(true); } - if let ItemContainerId::TraitId(t) = def.lookup(self.db).container { - if self.db.lang_attr(t.into()) == Some(LangItem::Clone) { - let [self_ty] = generic_args.as_slice(Interner) else { - not_supported!("wrong generic arg count for clone"); - }; - let Some(self_ty) = self_ty.ty(Interner) else { - not_supported!("wrong generic arg kind for clone"); - }; - // Clone has special impls for tuples and function pointers - if matches!( - self_ty.kind(Interner), - TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..) - ) { - self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?; - return Ok(true); - } - // Return early to prevent caching clone as non special fn. - return Ok(false); + if let ItemContainerId::TraitId(t) = def.lookup(self.db).container + && self.db.lang_attr(t.into()) == Some(LangItem::Clone) + { + let [self_ty] = generic_args.as_slice(Interner) else { + not_supported!("wrong generic arg count for clone"); + }; + let Some(self_ty) = self_ty.ty(Interner) else { + not_supported!("wrong generic arg kind for clone"); + }; + // Clone has special impls for tuples and function pointers + if matches!( + self_ty.kind(Interner), + TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..) + ) { + self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?; + return Ok(true); } + // Return early to prevent caching clone as non special fn. 
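Two evaluator hunks above are not let-chain changes: the manual `ptr % align != 0` alignment loops become `!ptr.is_multiple_of(align)`. Assuming `usize::is_multiple_of` (the std method on unsigned integers), the two forms are equivalent; a quick sketch:

    // Round `ptr` up to the next multiple of `align`, the way the evaluator
    // grows its stack and heap. Both loops do the same thing; the second
    // states the intent directly.
    fn align_up_with_modulo(mut ptr: usize, align: usize) -> usize {
        while ptr % align != 0 {
            ptr += 1;
        }
        ptr
    }

    fn align_up_with_is_multiple_of(mut ptr: usize, align: usize) -> usize {
        while !ptr.is_multiple_of(align) {
            ptr += 1;
        }
        ptr
    }

    fn main() {
        for align in [1usize, 2, 4, 8, 16] {
            for ptr in 0..64 {
                assert_eq!(
                    align_up_with_modulo(ptr, align),
                    align_up_with_is_multiple_of(ptr, align)
                );
            }
        }
    }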
+ return Ok(false); } self.not_special_fn_cache.borrow_mut().insert(def); Ok(false) @@ -1256,23 +1256,22 @@ impl Evaluator<'_> { let addr = tuple.interval.addr.offset(offset); args.push(IntervalAndTy::new(addr, field, self, locals)?); } - if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) { - if let Some(def) = target + if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) + && let Some(def) = target .trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::call_once)) - { - self.exec_fn_trait( - def, - &args, - // FIXME: wrong for manual impls of `FnOnce` - Substitution::empty(Interner), - locals, - destination, - None, - span, - )?; - return Ok(true); - } + { + self.exec_fn_trait( + def, + &args, + // FIXME: wrong for manual impls of `FnOnce` + Substitution::empty(Interner), + locals, + destination, + None, + span, + )?; + return Ok(true); } not_supported!("FnOnce was not available for executing const_eval_select"); } @@ -1367,12 +1366,11 @@ impl Evaluator<'_> { break; } } - if signed { - if let Some((&l, &r)) = lhs.iter().zip(rhs).next_back() { - if l != r { - result = (l as i8).cmp(&(r as i8)); - } - } + if signed + && let Some((&l, &r)) = lhs.iter().zip(rhs).next_back() + && l != r + { + result = (l as i8).cmp(&(r as i8)); } if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) { let ty = self.db.ty(e.into()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs index bc331a23d98e3..f554772904537 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -114,12 +114,11 @@ impl Evaluator<'_> { break; } } - if is_signed { - if let Some((&l, &r)) = l.iter().zip(r).next_back() { - if l != r { - result = (l as i8).cmp(&(r as i8)); - } - } + if is_signed + && let Some((&l, &r)) = l.iter().zip(r).next_back() + && l != r + { + result = (l as i8).cmp(&(r as i8)); } let result = match result { Ordering::Less => ["lt", "le", "ne"].contains(&name), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 07d814727293e..eb80e8706fa0c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -320,11 +320,11 @@ impl<'ctx> MirLowerCtx<'ctx> { expr_id: ExprId, current: BasicBlockId, ) -> Result> { - if !self.has_adjustments(expr_id) { - if let Expr::Literal(l) = &self.body[expr_id] { - let ty = self.expr_ty_without_adjust(expr_id); - return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); - } + if !self.has_adjustments(expr_id) + && let Expr::Literal(l) = &self.body[expr_id] + { + let ty = self.expr_ty_without_adjust(expr_id); + return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); } let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? 
else { return Ok(None); @@ -1039,18 +1039,18 @@ impl<'ctx> MirLowerCtx<'ctx> { && rhs_ty.is_scalar() && (lhs_ty == rhs_ty || builtin_inequal_impls) }; - if !is_builtin { - if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) { - let func = Operand::from_fn(self.db, func_id, generic_args); - return self.lower_call_and_args( - func, - [*lhs, *rhs].into_iter(), - place, - current, - self.is_uninhabited(expr_id), - expr_id.into(), - ); - } + if !is_builtin + && let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) + { + let func = Operand::from_fn(self.db, func_id, generic_args); + return self.lower_call_and_args( + func, + [*lhs, *rhs].into_iter(), + place, + current, + self.is_uninhabited(expr_id), + expr_id.into(), + ); } if let hir_def::hir::BinaryOp::Assignment { op: Some(op) } = op { // last adjustment is `&mut` which we don't want it. @@ -1596,10 +1596,10 @@ impl<'ctx> MirLowerCtx<'ctx> { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { let mut ty = None; - if let Some(it) = self.infer.expr_adjustments.get(&e) { - if let Some(it) = it.last() { - ty = Some(it.target.clone()); - } + if let Some(it) = self.infer.expr_adjustments.get(&e) + && let Some(it) = it.last() + { + ty = Some(it.target.clone()); } ty.unwrap_or_else(|| self.expr_ty_without_adjust(e)) } @@ -1848,13 +1848,13 @@ impl<'ctx> MirLowerCtx<'ctx> { self.result.param_locals.extend(params.clone().map(|(it, ty)| { let local_id = self.result.locals.alloc(Local { ty }); self.drop_scopes.last_mut().unwrap().locals.push(local_id); - if let Pat::Bind { id, subpat: None } = self.body[it] { - if matches!( + if let Pat::Bind { id, subpat: None } = self.body[it] + && matches!( self.body[id].mode, BindingAnnotation::Unannotated | BindingAnnotation::Mutable - ) { - self.result.binding_locals.insert(id, local_id); - } + ) + { + self.result.binding_locals.insert(id, local_id); } local_id })); @@ -1887,10 +1887,10 @@ impl<'ctx> MirLowerCtx<'ctx> { .into_iter() .skip(base_param_count + self_binding.is_some() as usize); for ((param, _), local) in params.zip(local_params) { - if let Pat::Bind { id, .. } = self.body[param] { - if local == self.binding_local(id)? { - continue; - } + if let Pat::Bind { id, .. } = self.body[param] + && local == self.binding_local(id)? 
+ { + continue; } let r = self.pattern_match(current, None, local.into(), param)?; if let Some(b) = r.1 { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index e074c2d558e84..42a14664626f0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -189,17 +189,14 @@ impl MirLowerCtx<'_> { self.expr_ty_without_adjust(expr_id), expr_id.into(), 'b: { - if let Some((f, _)) = self.infer.method_resolution(expr_id) { - if let Some(deref_trait) = + if let Some((f, _)) = self.infer.method_resolution(expr_id) + && let Some(deref_trait) = self.resolve_lang_item(LangItem::DerefMut)?.as_trait() - { - if let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - break 'b deref_fn == f; - } - } + && let Some(deref_fn) = deref_trait + .trait_items(self.db) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) + { + break 'b deref_fn == f; } false }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 3325226b1d369..0440d85022321 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -317,27 +317,26 @@ impl MirLowerCtx<'_> { (current, current_else) = self.pattern_match_inner(current, current_else, next_place, pat, mode)?; } - if let &Some(slice) = slice { - if mode != MatchingMode::Check { - if let Pat::Bind { id, subpat: _ } = self.body[slice] { - let next_place = cond_place.project( - ProjectionElem::Subslice { - from: prefix.len() as u64, - to: suffix.len() as u64, - }, - &mut self.result.projection_store, - ); - let mode = self.infer.binding_modes[slice]; - (current, current_else) = self.pattern_match_binding( - id, - mode, - next_place, - (slice).into(), - current, - current_else, - )?; - } - } + if let &Some(slice) = slice + && mode != MatchingMode::Check + && let Pat::Bind { id, subpat: _ } = self.body[slice] + { + let next_place = cond_place.project( + ProjectionElem::Subslice { + from: prefix.len() as u64, + to: suffix.len() as u64, + }, + &mut self.result.projection_store, + ); + let mode = self.infer.binding_modes[slice]; + (current, current_else) = self.pattern_match_binding( + id, + mode, + next_place, + (slice).into(), + current, + current_else, + )?; } for (i, &pat) in suffix.iter().enumerate() { let next_place = cond_place.project( @@ -391,10 +390,10 @@ impl MirLowerCtx<'_> { return Ok((current, current_else)); } let (c, subst) = 'b: { - if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) { - if let AssocItemId::ConstId(c) = x.0 { - break 'b (c, x.1); - } + if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) + && let AssocItemId::ConstId(c) = x.0 + { + break 'b (c, x.1); } if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr { break 'b (c, Substitution::empty(Interner)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 7414b4fc6070e..08b9d242e71d2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -125,11 +125,10 @@ pub(crate) fn trait_solve_query( alias: AliasTy::Projection(projection_ty), .. 
}))) = &goal.value.goal.data(Interner) + && let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) { - if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) { - // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible - return Some(Solution::Ambig(Guidance::Unknown)); - } + // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible + return Some(Solution::Ambig(Guidance::Unknown)); } // Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index d07c1aa33b407..209ec7926e825 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -333,13 +333,13 @@ impl FallibleTypeFolder for UnevaluatedConstEvaluatorFolder<'_> { constant: Const, _outer_binder: DebruijnIndex, ) -> Result { - if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value { - if let ConstScalar::UnevaluatedConst(id, subst) = &c.interned { - if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) { - return Ok(eval); - } else { - return Ok(unknown_const(constant.data(Interner).ty.clone())); - } + if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value + && let ConstScalar::UnevaluatedConst(id, subst) = &c.interned + { + if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) { + return Ok(eval); + } else { + return Ok(unknown_const(constant.data(Interner).ty.clone())); } } Ok(constant) diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index c1e814ec223e3..fca0162765ecf 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -604,13 +604,13 @@ impl<'db> AnyDiagnostic<'db> { } } BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr } => { - if let Ok(source_ptr) = source_map.expr_syntax(if_expr) { - if let Some(ptr) = source_ptr.value.cast::() { - return Some( - RemoveUnnecessaryElse { if_expr: InFile::new(source_ptr.file_id, ptr) } - .into(), - ); - } + if let Ok(source_ptr) = source_map.expr_syntax(if_expr) + && let Some(ptr) = source_ptr.value.cast::() + { + return Some( + RemoveUnnecessaryElse { if_expr: InFile::new(source_ptr.file_id, ptr) } + .into(), + ); } } } diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 4ddb04b24f7f2..a323f97997c68 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1020,21 +1020,21 @@ fn emit_macro_def_diagnostics<'db>( m: Macro, ) { let id = db.macro_def(m.id); - if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { - if let Some(e) = expander.mac.err() { - let Some(ast) = id.ast_id().left() else { - never!("declarative expander for non decl-macro: {:?}", e); - return; - }; - let krate = HasModule::krate(&m.id, db); - let edition = krate.data(db).edition; - emit_def_diagnostic_( - db, - acc, - &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, - edition, - ); - } + if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) + && let Some(e) = expander.mac.err() + { + let Some(ast) = id.ast_id().left() else { + never!("declarative expander for non 
decl-macro: {:?}", e); + return; + }; + let krate = HasModule::krate(&m.id, db); + let edition = krate.data(db).edition; + emit_def_diagnostic_( + db, + acc, + &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, + edition, + ); } } @@ -2564,10 +2564,10 @@ impl<'db> Param<'db> { Callee::Closure(closure, _) => { let c = db.lookup_intern_closure(closure.into()); let body = db.body(c.0); - if let Expr::Closure { args, .. } = &body[c.1] { - if let Pat::Bind { id, .. } = &body[args[self.idx]] { - return Some(Local { parent: c.0, binding_id: *id }); - } + if let Expr::Closure { args, .. } = &body[c.1] + && let Pat::Bind { id, .. } = &body[args[self.idx]] + { + return Some(Local { parent: c.0, binding_id: *id }); } None } @@ -2761,26 +2761,20 @@ impl EvaluatedConst { pub fn render_debug(&self, db: &dyn HirDatabase) -> Result { let data = self.const_.data(Interner); - if let TyKind::Scalar(s) = data.ty.kind(Interner) { - if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) { - if let hir_ty::ConstValue::Concrete(c) = &data.value { - if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned { - let value = u128::from_le_bytes(mir::pad16(b, false)); - let value_signed = - i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_)))); - let mut result = if let Scalar::Int(_) = s { - value_signed.to_string() - } else { - value.to_string() - }; - if value >= 10 { - format_to!(result, " ({value:#X})"); - return Ok(result); - } else { - return Ok(result); - } - } - } + if let TyKind::Scalar(s) = data.ty.kind(Interner) + && matches!(s, Scalar::Int(_) | Scalar::Uint(_)) + && let hir_ty::ConstValue::Concrete(c) = &data.value + && let hir_ty::ConstScalar::Bytes(b, _) = &c.interned + { + let value = u128::from_le_bytes(mir::pad16(b, false)); + let value_signed = i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_)))); + let mut result = + if let Scalar::Int(_) = s { value_signed.to_string() } else { value.to_string() }; + if value >= 10 { + format_to!(result, " ({value:#X})"); + return Ok(result); + } else { + return Ok(result); } } mir::render_const_using_debug_impl(db, self.def, &self.const_) @@ -4421,10 +4415,10 @@ impl Impl { let impls = db.trait_impls_in_crate(id); all.extend(impls.for_trait(trait_.id).map(Self::from)) } - if let Some(block) = module.id.containing_block() { - if let Some(trait_impls) = db.trait_impls_in_block(block) { - all.extend(trait_impls.for_trait(trait_.id).map(Self::from)); - } + if let Some(block) = module.id.containing_block() + && let Some(trait_impls) = db.trait_impls_in_block(block) + { + all.extend(trait_impls.for_trait(trait_.id).map(Self::from)); } all } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index adba59236a40f..d207305b4c61f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -933,19 +933,18 @@ impl<'db> SemanticsImpl<'db> { InFile::new(file.file_id, last), false, &mut |InFile { value: last, file_id: last_fid }, _ctx| { - if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { - if first_fid == last_fid { - if let Some(p) = first.parent() { - let range = first.text_range().cover(last.text_range()); - let node = find_root(&p) - .covering_element(range) - .ancestors() - .take_while(|it| it.text_range() == range) - .find_map(N::cast); - if let Some(node) = node { - res.push(node); - } - } + if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() + && first_fid == 
last_fid + && let Some(p) = first.parent() + { + let range = first.text_range().cover(last.text_range()); + let node = find_root(&p) + .covering_element(range) + .ancestors() + .take_while(|it| it.text_range() == range) + .find_map(N::cast); + if let Some(node) = node { + res.push(node); } } }, @@ -1391,10 +1390,10 @@ impl<'db> SemanticsImpl<'db> { } })() .is_none(); - if was_not_remapped { - if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) { - return Some(b); - } + if was_not_remapped + && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) + { + return Some(b); } } } @@ -2068,14 +2067,12 @@ impl<'db> SemanticsImpl<'db> { break false; } - if let Some(parent) = ast::Expr::cast(parent.clone()) { - if let Some(ExprOrPatId::ExprId(expr_id)) = + if let Some(parent) = ast::Expr::cast(parent.clone()) + && let Some(ExprOrPatId::ExprId(expr_id)) = source_map.node_expr(InFile { file_id, value: &parent }) - { - if let Expr::Unsafe { .. } = body[expr_id] { - break true; - } - } + && let Expr::Unsafe { .. } = body[expr_id] + { + break true; } let Some(parent_) = parent.parent() else { break false }; @@ -2354,32 +2351,30 @@ struct RenameConflictsVisitor<'a> { impl RenameConflictsVisitor<'_> { fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) { - if let Path::BarePath(path) = path { - if let Some(name) = path.as_ident() { - if *name.symbol() == self.new_name { - if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed( - self.db, - name, - path, - self.body.expr_or_pat_path_hygiene(node), - self.to_be_renamed, - ) { - self.conflicts.insert(conflicting); - } - } else if *name.symbol() == self.old_name { - if let Some(conflicting) = - self.resolver.rename_will_conflict_with_another_variable( - self.db, - name, - path, - self.body.expr_or_pat_path_hygiene(node), - &self.new_name, - self.to_be_renamed, - ) - { - self.conflicts.insert(conflicting); - } + if let Path::BarePath(path) = path + && let Some(name) = path.as_ident() + { + if *name.symbol() == self.new_name { + if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed( + self.db, + name, + path, + self.body.expr_or_pat_path_hygiene(node), + self.to_be_renamed, + ) { + self.conflicts.insert(conflicting); } + } else if *name.symbol() == self.old_name + && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable( + self.db, + name, + path, + self.body.expr_or_pat_path_hygiene(node), + &self.new_name, + self.to_be_renamed, + ) + { + self.conflicts.insert(conflicting); } } } diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 0b554a9d4e37a..d25fb1d8cdb7e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -995,11 +995,11 @@ impl<'db> SourceAnalyzer<'db> { // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // trying to resolve foo::bar. 
- if let Some(use_tree) = parent().and_then(ast::UseTree::cast) { - if use_tree.coloncolon_token().is_some() { - return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) - .map(|it| (it, None)); - } + if let Some(use_tree) = parent().and_then(ast::UseTree::cast) + && use_tree.coloncolon_token().is_some() + { + return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) + .map(|it| (it, None)); } let meta_path = path @@ -1035,24 +1035,19 @@ impl<'db> SourceAnalyzer<'db> { // } // ``` Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => { - if let Some(mod_path) = hir_path.mod_path() { - if let Some(ModuleDefId::ModuleId(id)) = + if let Some(mod_path) = hir_path.mod_path() + && let Some(ModuleDefId::ModuleId(id)) = self.resolver.resolve_module_path_in_items(db, mod_path).take_types() + { + let parent_hir_name = parent_hir_path.segments().get(1).map(|it| it.name); + let module = crate::Module { id }; + if module + .scope(db, None) + .into_iter() + .any(|(name, _)| Some(&name) == parent_hir_name) { - let parent_hir_name = - parent_hir_path.segments().get(1).map(|it| it.name); - let module = crate::Module { id }; - if module - .scope(db, None) - .into_iter() - .any(|(name, _)| Some(&name) == parent_hir_name) - { - return Some(( - PathResolution::Def(ModuleDef::Module(module)), - None, - )); - }; - } + return Some((PathResolution::Def(ModuleDef::Module(module)), None)); + }; } Some((it, None)) } @@ -1282,22 +1277,22 @@ impl<'db> SourceAnalyzer<'db> { db: &'db dyn HirDatabase, macro_expr: InFile<&ast::MacroExpr>, ) -> bool { - if let Some((def, body, sm, Some(infer))) = self.body_() { - if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) { - let mut is_unsafe = false; - let mut walk_expr = |expr_id| { - unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| { - is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No - }) - }; - match expanded_expr { - ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr), - ExprOrPatId::PatId(expanded_pat) => { - body.walk_exprs_in_pat(expanded_pat, &mut walk_expr) - } + if let Some((def, body, sm, Some(infer))) = self.body_() + && let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) + { + let mut is_unsafe = false; + let mut walk_expr = |expr_id| { + unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| { + is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No + }) + }; + match expanded_expr { + ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr), + ExprOrPatId::PatId(expanded_pat) => { + body.walk_exprs_in_pat(expanded_pat, &mut walk_expr) } - return is_unsafe; } + return is_unsafe; } false } @@ -1575,12 +1570,11 @@ fn resolve_hir_path_( // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type // within the trait's associated types. 
- if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { - if let Some(type_alias_id) = + if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) + && let Some(type_alias_id) = trait_id.trait_items(db).associated_type_by_name(unresolved.name) - { - return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); - } + { + return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); } let res = match ty { @@ -1726,12 +1720,11 @@ fn resolve_hir_path_qualifier( // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type // within the trait's associated types. - if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { - if let Some(type_alias_id) = + if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) + && let Some(type_alias_id) = trait_id.trait_items(db).associated_type_by_name(unresolved.name) - { - return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); - } + { + return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); } let res = match ty { diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search.rs b/src/tools/rust-analyzer/crates/hir/src/term_search.rs index 4b354e640628d..e4089218305ce 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search.rs @@ -122,10 +122,10 @@ impl<'db> LookupTable<'db> { } // Collapse suggestions if there are many - if let Some(res) = &res { - if res.len() > self.many_threshold { - return Some(vec![Expr::Many(ty.clone())]); - } + if let Some(res) = &res + && res.len() > self.many_threshold + { + return Some(vec![Expr::Many(ty.clone())]); } res @@ -160,10 +160,10 @@ impl<'db> LookupTable<'db> { } // Collapse suggestions if there are many - if let Some(res) = &res { - if res.len() > self.many_threshold { - return Some(vec![Expr::Many(ty.clone())]); - } + if let Some(res) = &res + && res.len() > self.many_threshold + { + return Some(vec![Expr::Many(ty.clone())]); } res diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs index 843831948adc8..78f534d014b90 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs @@ -336,10 +336,10 @@ impl<'db> Expr<'db> { if let Expr::Method { func, params, .. } = self { res.extend(params.iter().flat_map(|it| it.traits_used(db))); - if let Some(it) = func.as_assoc_item(db) { - if let Some(it) = it.container_or_implemented_trait(db) { - res.push(it); - } + if let Some(it) = func.as_assoc_item(db) + && let Some(it) = it.container_or_implemented_trait(db) + { + res.push(it); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs index dcdc7ea9cdced..27dbdcf2c4d57 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs @@ -82,10 +82,10 @@ fn fetch_borrowed_types(node: &ast::Adt) -> Option> { record_field_list .fields() .filter_map(|r_field| { - if let ast::Type::RefType(ref_type) = r_field.ty()? { - if ref_type.lifetime().is_none() { - return Some(ref_type); - } + if let ast::Type::RefType(ref_type) = r_field.ty()? 
+ && ref_type.lifetime().is_none() + { + return Some(ref_type); } None @@ -102,10 +102,10 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option record_list .fields() .filter_map(|f| { - if let ast::Type::RefType(ref_type) = f.ty()? { - if ref_type.lifetime().is_none() { - return Some(ref_type); - } + if let ast::Type::RefType(ref_type) = f.ty()? + && ref_type.lifetime().is_none() + { + return Some(ref_type); } None @@ -114,10 +114,10 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option tuple_field_list .fields() .filter_map(|f| { - if let ast::Type::RefType(ref_type) = f.ty()? { - if ref_type.lifetime().is_none() { - return Some(ref_type); - } + if let ast::Type::RefType(ref_type) = f.ty()? + && ref_type.lifetime().is_none() + { + return Some(ref_type); } None diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index ab183ac70895f..7f1e7ccb4487f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -201,14 +201,12 @@ fn add_missing_impl_members_inner( if let Some(cap) = ctx.config.snippet_cap { let mut placeholder = None; - if let DefaultMethods::No = mode { - if let Some(ast::AssocItem::Fn(func)) = &first_new_item { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) - && m.syntax().text() == "todo!()" - { - placeholder = Some(m); - } - } + if let DefaultMethods::No = mode + && let Some(ast::AssocItem::Fn(func)) = &first_new_item + && let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) + && m.syntax().text() == "todo!()" + { + placeholder = Some(m); } if let Some(macro_call) = placeholder { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs index 3b447d1f6d572..753a9e56c35ac 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs @@ -207,10 +207,10 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_> // negate all tail expressions in the closure body let tail_cb = &mut |e: &_| tail_cb_impl(&mut editor, &make, e); walk_expr(&closure_body, &mut |expr| { - if let ast::Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let ast::Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&closure_body, tail_cb); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs index d7b7e8d9cad07..9d5d3f223707a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -86,12 +86,11 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_> e @ ast::Expr::CallExpr(_) => Some(e.clone()), _ => None, }; - if let Some(ast::Expr::CallExpr(call)) = e { - if let Some(arg_list) = call.arg_list() { - if let Some(arg) = arg_list.args().next() { - editor.replace(call.syntax(), 
arg.syntax()); - } - } + if let Some(ast::Expr::CallExpr(call)) = e + && let Some(arg_list) = call.arg_list() + && let Some(arg) = arg_list.args().next() + { + editor.replace(call.syntax(), arg.syntax()); } }); let edit = editor.finish(); @@ -276,12 +275,12 @@ fn is_invalid_body( e @ ast::Expr::CallExpr(_) => Some(e.clone()), _ => None, }; - if let Some(ast::Expr::CallExpr(call)) = e { - if let Some(ast::Expr::PathExpr(p)) = call.expr() { - let res = p.path().and_then(|p| sema.resolve_path(&p)); - if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res { - return invalid |= v != some_variant; - } + if let Some(ast::Expr::CallExpr(call)) = e + && let Some(ast::Expr::PathExpr(p)) = call.expr() + { + let res = p.path().and_then(|p| sema.resolve_path(&p)); + if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res { + return invalid |= v != some_variant; } } invalid = true diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs index 43515de71e20d..916bb67ebb405 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs @@ -101,21 +101,21 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>) // but we need to locate `AstPtr`s inside the body. let mut wrap_body_in_block = true; if let ast::Expr::BlockExpr(block) = &body { - if let Some(async_token) = block.async_token() { - if !is_async { - is_async = true; - ret_ty = ret_ty.future_output(ctx.db())?; - let token_idx = async_token.index(); - let whitespace_tokens_after_count = async_token - .siblings_with_tokens(Direction::Next) - .skip(1) - .take_while(|token| token.kind() == SyntaxKind::WHITESPACE) - .count(); - body.syntax().splice_children( - token_idx..token_idx + whitespace_tokens_after_count + 1, - Vec::new(), - ); - } + if let Some(async_token) = block.async_token() + && !is_async + { + is_async = true; + ret_ty = ret_ty.future_output(ctx.db())?; + let token_idx = async_token.index(); + let whitespace_tokens_after_count = async_token + .siblings_with_tokens(Direction::Next) + .skip(1) + .take_while(|token| token.kind() == SyntaxKind::WHITESPACE) + .count(); + body.syntax().splice_children( + token_idx..token_idx + whitespace_tokens_after_count + 1, + Vec::new(), + ); } if let Some(gen_token) = block.gen_token() { is_gen = true; @@ -513,10 +513,10 @@ fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Exp CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true, CaptureKind::Move => return place, }; - if let ast::Expr::PrefixExpr(expr) = &place { - if expr.op_kind() == Some(ast::UnaryOp::Deref) { - return expr.expr().expect("`display_place_source_code()` produced an invalid expr"); - } + if let ast::Expr::PrefixExpr(expr) = &place + && expr.op_kind() == Some(ast::UnaryOp::Deref) + { + return expr.expr().expect("`display_place_source_code()` produced an invalid expr"); } make::expr_ref(place, needs_mut) } @@ -642,11 +642,11 @@ fn peel_blocks_and_refs_and_parens(mut expr: ast::Expr) -> ast::Expr { expr = ast::Expr::cast(parent).unwrap(); continue; } - if let Some(stmt_list) = ast::StmtList::cast(parent) { - if let Some(block) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) { - expr = ast::Expr::BlockExpr(block); - continue; - } + if let Some(stmt_list) = ast::StmtList::cast(parent) + && let 
Some(block) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) + { + expr = ast::Expr::BlockExpr(block); + continue; } break; } @@ -662,12 +662,11 @@ fn expr_of_pat(pat: ast::Pat) -> Option { if let Some(let_stmt) = ast::LetStmt::cast(ancestor.clone()) { break 'find_expr let_stmt.initializer(); } - if ast::MatchArm::can_cast(ancestor.kind()) { - if let Some(match_) = + if ast::MatchArm::can_cast(ancestor.kind()) + && let Some(match_) = ancestor.parent().and_then(|it| it.parent()).and_then(ast::MatchExpr::cast) - { - break 'find_expr match_.expr(); - } + { + break 'find_expr match_.expr(); } if ast::ExprStmt::can_cast(ancestor.kind()) { break; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs index db41927f1df2f..a4742bc7bded9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs @@ -50,10 +50,10 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> let associated_items = impl_.assoc_item_list()?; let from_fn = associated_items.assoc_items().find_map(|item| { - if let ast::AssocItem::Fn(f) = item { - if f.name()?.text() == "from" { - return Some(f); - } + if let ast::AssocItem::Fn(f) = item + && f.name()?.text() == "from" + { + return Some(f); }; None })?; @@ -110,12 +110,11 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> )) .clone_for_update(); - if let Some(cap) = ctx.config.snippet_cap { - if let ast::AssocItem::TypeAlias(type_alias) = &error_type { - if let Some(ty) = type_alias.ty() { - builder.add_placeholder_snippet(cap, ty); - } - } + if let Some(cap) = ctx.config.snippet_cap + && let ast::AssocItem::TypeAlias(type_alias) = &error_type + && let Some(ty) = type_alias.ty() + { + builder.add_placeholder_snippet(cap, ty); } associated_items.add_item_at_start(error_type); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs index b80276a95fbf5..3d9cde0e0a67c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -65,10 +65,10 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - }; let into_fn = impl_.assoc_item_list()?.assoc_items().find_map(|item| { - if let ast::AssocItem::Fn(f) = item { - if f.name()?.text() == "into" { - return Some(f); - } + if let ast::AssocItem::Fn(f) = item + && f.name()?.text() == "into" + { + return Some(f); }; None })?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index cca4cb9d8f775..247c1011589bb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -265,10 +265,10 @@ fn replace_body_return_values(body: ast::Expr, struct_name: &str) { let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); walk_expr(&body, &mut |expr| { - if let ast::Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - 
for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let ast::Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&body, tail_cb); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index b27ebcaa4edf2..3d78895477b31 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -192,7 +192,7 @@ fn edit_struct_references( ).syntax().clone() ) }, - _ => return None, + _ => None, } } }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs index e582aa814ae14..1af5db17f0400 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs @@ -100,10 +100,10 @@ fn is_bool_literal_expr( sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr, ) -> Option { - if let ast::Expr::Literal(lit) = expr { - if let ast::LiteralKind::Bool(b) = lit.kind() { - return Some(ArmBodyExpression::Literal(b)); - } + if let ast::Expr::Literal(lit) = expr + && let ast::LiteralKind::Bool(b) = lit.kind() + { + return Some(ArmBodyExpression::Literal(b)); } if !sema.type_of_expr(expr)?.original.is_bool() { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs index efadde9e3648a..9976e34e730cc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_try_expr.rs @@ -106,73 +106,73 @@ pub(crate) fn desugar_try_expr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op }, ); - if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast) { - if let_stmt.let_else().is_none() { - let pat = let_stmt.pat()?; - acc.add( - AssistId::refactor_rewrite("desugar_try_expr_let_else"), - "Replace try expression with let else", - target, - |builder| { - let make = SyntaxFactory::with_mappings(); - let mut editor = builder.make_editor(let_stmt.syntax()); + if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast) + && let_stmt.let_else().is_none() + { + let pat = let_stmt.pat()?; + acc.add( + AssistId::refactor_rewrite("desugar_try_expr_let_else"), + "Replace try expression with let else", + target, + |builder| { + let make = SyntaxFactory::with_mappings(); + let mut editor = builder.make_editor(let_stmt.syntax()); - let indent_level = IndentLevel::from_node(let_stmt.syntax()); - let new_let_stmt = make.let_else_stmt( - try_enum.happy_pattern(pat), - let_stmt.ty(), - expr, - make.block_expr( - iter::once( - make.expr_stmt( - make.expr_return(Some(match try_enum { - TryEnum::Option => make.expr_path(make.ident_path("None")), - TryEnum::Result => make - .expr_call( - make.expr_path(make.ident_path("Err")), - make.arg_list(iter::once( - match ctx.config.expr_fill_default { - ExprFillDefaultMode::Todo => make - .expr_macro( - make.ident_path("todo"), - make.token_tree( - 
syntax::SyntaxKind::L_PAREN, - [], - ), - ) - .into(), - ExprFillDefaultMode::Underscore => { - make.expr_underscore().into() - } - ExprFillDefaultMode::Default => make - .expr_macro( - make.ident_path("todo"), - make.token_tree( - syntax::SyntaxKind::L_PAREN, - [], - ), - ) - .into(), - }, - )), - ) - .into(), - })) - .indent(indent_level + 1) - .into(), - ) + let indent_level = IndentLevel::from_node(let_stmt.syntax()); + let new_let_stmt = make.let_else_stmt( + try_enum.happy_pattern(pat), + let_stmt.ty(), + expr, + make.block_expr( + iter::once( + make.expr_stmt( + make.expr_return(Some(match try_enum { + TryEnum::Option => make.expr_path(make.ident_path("None")), + TryEnum::Result => make + .expr_call( + make.expr_path(make.ident_path("Err")), + make.arg_list(iter::once( + match ctx.config.expr_fill_default { + ExprFillDefaultMode::Todo => make + .expr_macro( + make.ident_path("todo"), + make.token_tree( + syntax::SyntaxKind::L_PAREN, + [], + ), + ) + .into(), + ExprFillDefaultMode::Underscore => { + make.expr_underscore().into() + } + ExprFillDefaultMode::Default => make + .expr_macro( + make.ident_path("todo"), + make.token_tree( + syntax::SyntaxKind::L_PAREN, + [], + ), + ) + .into(), + }, + )), + ) + .into(), + })) + .indent(indent_level + 1) .into(), - ), - None, - ) - .indent(indent_level), - ); - editor.replace(let_stmt.syntax(), new_let_stmt.syntax()); - editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.vfs_file_id(), editor); - }, - ); - } + ) + .into(), + ), + None, + ) + .indent(indent_level), + ); + editor.replace(let_stmt.syntax(), new_let_stmt.syntax()); + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, + ); } Some(()) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs index 307414c79715a..66552dd65f567 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs @@ -272,16 +272,16 @@ impl Refs { .clone() .into_iter() .filter(|r| { - if let Definition::Trait(tr) = r.def { - if tr.items(ctx.db()).into_iter().any(|ai| { + if let Definition::Trait(tr) = r.def + && tr.items(ctx.db()).into_iter().any(|ai| { if let AssocItem::Function(f) = ai { def_is_referenced_in(Definition::Function(f), ctx) } else { false } - }) { - return true; - } + }) + { + return true; } def_is_referenced_in(r.def, ctx) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 00cbef1c01c01..890b8dd64126e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -175,10 +175,10 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let fn_def = format_function(ctx, module, &fun, old_indent).clone_for_update(); - if let Some(cap) = ctx.config.snippet_cap { - if let Some(name) = fn_def.name() { - builder.add_tabstop_before(cap, name); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(name) = fn_def.name() + { + builder.add_tabstop_before(cap, name); } let fn_def = match fun.self_param_adt(ctx) { @@ -289,10 +289,10 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option { let func = 
sema.to_def(&fn_)?; let mut ret_ty = func.ret_type(sema.db); - if func.is_async(sema.db) { - if let Some(async_ret) = func.async_ret_type(sema.db) { + if func.is_async(sema.db) + && let Some(async_ret) = func.async_ret_type(sema.db) { ret_ty = async_ret; } - } (fn_.const_token().is_some(), fn_.body().map(ast::Expr::BlockExpr), Some(ret_ty)) }, ast::Static(statik) => { @@ -1172,19 +1171,19 @@ impl GenericParent { /// Search `parent`'s ancestors for items with potentially applicable generic parameters fn generic_parents(parent: &SyntaxNode) -> Vec { let mut list = Vec::new(); - if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) { - if let ast::Item::Fn(ref fn_) = parent_item { - if let Some(parent_parent) = - parent_item.syntax().parent().and_then(|it| it.parent()).and_then(ast::Item::cast) - { - match parent_parent { - ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)), - ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)), - _ => (), - } + if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) + && let ast::Item::Fn(ref fn_) = parent_item + { + if let Some(parent_parent) = + parent_item.syntax().parent().and_then(|it| it.parent()).and_then(ast::Item::cast) + { + match parent_parent { + ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)), + ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)), + _ => (), } - list.push(GenericParent::Fn(fn_.clone())); } + list.push(GenericParent::Fn(fn_.clone())); } list } @@ -1337,10 +1336,10 @@ fn locals_defined_in_body( // see https://github.com/rust-lang/rust-analyzer/pull/7535#discussion_r570048550 let mut res = FxIndexSet::default(); body.walk_pat(&mut |pat| { - if let ast::Pat::IdentPat(pat) = pat { - if let Some(local) = sema.to_def(&pat) { - res.insert(local); - } + if let ast::Pat::IdentPat(pat) = pat + && let Some(local) = sema.to_def(&pat) + { + res.insert(local); } }); res @@ -1445,11 +1444,11 @@ fn impl_type_name(impl_node: &ast::Impl) -> Option { fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) { let parent_match_arm = body.parent().and_then(ast::MatchArm::cast); - if let Some(parent_match_arm) = parent_match_arm { - if parent_match_arm.comma_token().is_none() { - let parent_match_arm = builder.make_mut(parent_match_arm); - ted::append_child_raw(parent_match_arm.syntax(), make::token(T![,])); - } + if let Some(parent_match_arm) = parent_match_arm + && parent_match_arm.comma_token().is_none() + { + let parent_match_arm = builder.make_mut(parent_match_arm); + ted::append_child_raw(parent_match_arm.syntax(), make::token(T![,])); } } @@ -2120,30 +2119,30 @@ fn update_external_control_flow(handler: &FlowHandler<'_>, syntax: &SyntaxNode) _ => {} }, WalkEvent::Leave(e) => { - if nested_scope.is_none() { - if let Some(expr) = ast::Expr::cast(e.clone()) { - match expr { - ast::Expr::ReturnExpr(return_expr) => { - let expr = return_expr.expr(); - if let Some(replacement) = make_rewritten_flow(handler, expr) { - ted::replace(return_expr.syntax(), replacement.syntax()) - } - } - ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => { - let expr = break_expr.expr(); - if let Some(replacement) = make_rewritten_flow(handler, expr) { - ted::replace(break_expr.syntax(), replacement.syntax()) - } + if nested_scope.is_none() + && let Some(expr) = ast::Expr::cast(e.clone()) + { + match expr { + ast::Expr::ReturnExpr(return_expr) => { + let expr = return_expr.expr(); + if let Some(replacement) = make_rewritten_flow(handler, expr) { + 
ted::replace(return_expr.syntax(), replacement.syntax()) } - ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => { - if let Some(replacement) = make_rewritten_flow(handler, None) { - ted::replace(continue_expr.syntax(), replacement.syntax()) - } + } + ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => { + let expr = break_expr.expr(); + if let Some(replacement) = make_rewritten_flow(handler, expr) { + ted::replace(break_expr.syntax(), replacement.syntax()) } - _ => { - // do nothing + } + ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => { + if let Some(replacement) = make_rewritten_flow(handler, None) { + ted::replace(continue_expr.syntax(), replacement.syntax()) } } + _ => { + // do nothing + } } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs index b82b7984d4a45..c6a6b97df8245 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs @@ -69,13 +69,12 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let mut impl_parent: Option = None; let mut impl_child_count: usize = 0; - if let Some(parent_assoc_list) = node.parent() { - if let Some(parent_impl) = parent_assoc_list.parent() { - if let Some(impl_) = ast::Impl::cast(parent_impl) { - impl_child_count = parent_assoc_list.children().count(); - impl_parent = Some(impl_); - } - } + if let Some(parent_assoc_list) = node.parent() + && let Some(parent_impl) = parent_assoc_list.parent() + && let Some(impl_) = ast::Impl::cast(parent_impl) + { + impl_child_count = parent_assoc_list.children().count(); + impl_parent = Some(impl_); } let mut curr_parent_module: Option = None; @@ -436,10 +435,10 @@ impl Module { } }) .for_each(|(node, def)| { - if node_set.insert(node.to_string()) { - if let Some(import) = self.process_def_in_sel(def, &node, &module, ctx) { - check_intersection_and_push(&mut imports_to_remove, import); - } + if node_set.insert(node.to_string()) + && let Some(import) = self.process_def_in_sel(def, &node, &module, ctx) + { + check_intersection_and_push(&mut imports_to_remove, import); } }) } @@ -542,15 +541,16 @@ impl Module { import_path_to_be_removed = Some(text_range); } - if def_in_mod && def_out_sel { - if let Some(first_path_in_use_tree) = use_tree_str.last() { - let first_path_in_use_tree_str = first_path_in_use_tree.to_string(); - if !first_path_in_use_tree_str.contains("super") - && !first_path_in_use_tree_str.contains("crate") - { - let super_path = make::ext::ident_path("super"); - use_tree_str.push(super_path); - } + if def_in_mod + && def_out_sel + && let Some(first_path_in_use_tree) = use_tree_str.last() + { + let first_path_in_use_tree_str = first_path_in_use_tree.to_string(); + if !first_path_in_use_tree_str.contains("super") + && !first_path_in_use_tree_str.contains("crate") + { + let super_path = make::ext::ident_path("super"); + use_tree_str.push(super_path); } } @@ -563,12 +563,11 @@ impl Module { if let Some(mut use_tree_paths) = use_tree_paths { use_tree_paths.reverse(); - if uses_exist_out_sel || !uses_exist_in_sel || !def_in_mod || !def_out_sel { - if let Some(first_path_in_use_tree) = use_tree_paths.first() { - if first_path_in_use_tree.to_string().contains("super") { - use_tree_paths.insert(0, make::ext::ident_path("super")); - } - } + if (uses_exist_out_sel || !uses_exist_in_sel || !def_in_mod || !def_out_sel) + 
&& let Some(first_path_in_use_tree) = use_tree_paths.first() + && first_path_in_use_tree.to_string().contains("super") + { + use_tree_paths.insert(0, make::ext::ident_path("super")); } let is_item = matches!( @@ -691,11 +690,9 @@ fn check_def_in_mod_and_out_sel( _ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id, }; - if have_same_parent { - if let ModuleSource::Module(module_) = source.value { - let in_sel = !selection_range.contains_range(module_.syntax().text_range()); - return (have_same_parent, in_sel); - } + if have_same_parent && let ModuleSource::Module(module_) = source.value { + let in_sel = !selection_range.contains_range(module_.syntax().text_range()); + return (have_same_parent, in_sel); } return (have_same_parent, false); @@ -772,12 +769,12 @@ fn get_use_tree_paths_from_path( .filter(|x| x.to_string() != path.to_string()) .filter_map(ast::UseTree::cast) .find_map(|use_tree| { - if let Some(upper_tree_path) = use_tree.path() { - if upper_tree_path.to_string() != path.to_string() { - use_tree_str.push(upper_tree_path.clone()); - get_use_tree_paths_from_path(upper_tree_path, use_tree_str); - return Some(use_tree); - } + if let Some(upper_tree_path) = use_tree.path() + && upper_tree_path.to_string() != path.to_string() + { + use_tree_str.push(upper_tree_path.clone()); + get_use_tree_paths_from_path(upper_tree_path, use_tree_str); + return Some(use_tree); } None })?; @@ -786,11 +783,11 @@ fn get_use_tree_paths_from_path( } fn add_change_vis(vis: Option, node_or_token_opt: Option) { - if vis.is_none() { - if let Some(node_or_token) = node_or_token_opt { - let pub_crate_vis = make::visibility_pub_crate().clone_for_update(); - ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax()); - } + if vis.is_none() + && let Some(node_or_token) = node_or_token_opt + { + let pub_crate_vis = make::visibility_pub_crate().clone_for_update(); + ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax()); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index 9095b1825f5fd..c56d0b3de5d6a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -215,12 +215,12 @@ fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, b ast::GenericParam::LifetimeParam(lt) if matches!(token.kind(), T![lifetime_ident]) => { - if let Some(lt) = lt.lifetime() { - if lt.text().as_str() == token.text() { - *tag = true; - tagged_one = true; - break; - } + if let Some(lt) = lt.lifetime() + && lt.text().as_str() == token.text() + { + *tag = true; + tagged_one = true; + break; } } param if matches!(token.kind(), T![ident]) => { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs index d843ac64567aa..79f22381952ae 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -72,10 +72,10 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> let ty_alias = make::ty_alias("Type", generic_params, None, None, Some((ty, None))) .clone_for_update(); - if let Some(cap) = ctx.config.snippet_cap { - 
if let Some(name) = ty_alias.name() { - edit.add_annotation(name.syntax(), builder.make_tabstop_before(cap)); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(name) = ty_alias.name() + { + edit.add_annotation(name.syntax(), builder.make_tabstop_before(cap)); } let indent = IndentLevel::from_node(node); @@ -111,17 +111,17 @@ fn collect_used_generics<'gp>( match ty { ast::Type::PathType(ty) => { if let Some(path) = ty.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { + if let Some(name_ref) = path.as_single_name_ref() + && let Some(param) = known_generics.iter().find(|gp| { match gp { ast::GenericParam::ConstParam(cp) => cp.name(), ast::GenericParam::TypeParam(tp) => tp.name(), _ => None, } .is_some_and(|n| n.text() == name_ref.text()) - }) { - generics.push(param); - } + }) + { + generics.push(param); } generics.extend( path.segments() @@ -160,20 +160,18 @@ fn collect_used_generics<'gp>( .and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), ), ast::Type::ArrayType(ar) => { - if let Some(ast::Expr::PathExpr(p)) = ar.const_arg().and_then(|x| x.expr()) { - if let Some(path) = p.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - if let ast::GenericParam::ConstParam(cp) = gp { - cp.name().is_some_and(|n| n.text() == name_ref.text()) - } else { - false - } - }) { - generics.push(param); - } + if let Some(ast::Expr::PathExpr(p)) = ar.const_arg().and_then(|x| x.expr()) + && let Some(path) = p.path() + && let Some(name_ref) = path.as_single_name_ref() + && let Some(param) = known_generics.iter().find(|gp| { + if let ast::GenericParam::ConstParam(cp) = gp { + cp.name().is_some_and(|n| n.text() == name_ref.text()) + } else { + false } - } + }) + { + generics.push(param); } } _ => (), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index db2d316d58ee3..c9c1969b9e023 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -404,11 +404,10 @@ impl Anchor { } if let Some(expr) = node.parent().and_then(ast::StmtList::cast).and_then(|it| it.tail_expr()) + && expr.syntax() == &node { - if expr.syntax() == &node { - cov_mark::hit!(test_extract_var_last_expr); - return Some(Anchor::Before(node)); - } + cov_mark::hit!(test_extract_var_last_expr); + return Some(Anchor::Before(node)); } if let Some(parent) = node.parent() { @@ -427,10 +426,10 @@ impl Anchor { } if let Some(stmt) = ast::Stmt::cast(node.clone()) { - if let ast::Stmt::ExprStmt(stmt) = stmt { - if stmt.expr().as_ref() == Some(to_extract) { - return Some(Anchor::Replace(stmt)); - } + if let ast::Stmt::ExprStmt(stmt) = stmt + && stmt.expr().as_ref() == Some(to_extract) + { + return Some(Anchor::Replace(stmt)); } return Some(Anchor::Before(node)); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs index 68587f0cb5bc5..77232dfebdfe4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -148,11 +148,11 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: 
&AssistContext<'_>) -> Option) -> Option) ], ); - if let Some(cap) = ctx.config.snippet_cap { - if let Some(name) = ty_alias.name() { - edit.add_annotation(name.syntax(), builder.make_placeholder_snippet(cap)); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(name) = ty_alias.name() + { + edit.add_annotation(name.syntax(), builder.make_placeholder_snippet(cap)); } builder.add_file_edits(ctx.vfs_file_id(), edit); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 3290a70e1c69c..613b32fcc1653 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -70,10 +70,10 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let TargetInfo { target_module, adt_info, target, file } = fn_target_info(ctx, path, &call, fn_name)?; - if let Some(m) = target_module { - if !is_editable_crate(m.krate(), ctx.db()) { - return None; - } + if let Some(m) = target_module + && !is_editable_crate(m.krate(), ctx.db()) + { + return None; } let function_builder = diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 20ee9253d379c..807b9194b2df7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -433,12 +433,11 @@ fn build_source_change( new_fn.indent(1.into()); // Insert a tabstop only for last method we generate - if i == record_fields_count - 1 { - if let Some(cap) = ctx.config.snippet_cap { - if let Some(name) = new_fn.name() { - builder.add_tabstop_before(cap, name); - } - } + if i == record_fields_count - 1 + && let Some(cap) = ctx.config.snippet_cap + && let Some(name) = new_fn.name() + { + builder.add_tabstop_before(cap, name); } assoc_item_list.add_item(new_fn.clone().into()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 31cadcf5ea86b..fcb81d239ff3f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -58,11 +58,11 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio let mut editor = edit.make_editor(nominal.syntax()); // Add a tabstop after the left curly brace - if let Some(cap) = ctx.config.snippet_cap { - if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) { - let tabstop = edit.make_tabstop_after(cap); - editor.add_annotation(l_curly, tabstop); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) + { + let tabstop = edit.make_tabstop_after(cap); + editor.add_annotation(l_curly, tabstop); } insert_impl(&mut editor, &impl_, &nominal); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 92a4bd35b3e78..dc3dc73701f3a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -175,18 +175,18 @@ fn remove_items_visibility(item: &ast::AssocItem) { } fn strip_body(item: &ast::AssocItem) { - if let ast::AssocItem::Fn(f) = item { - if let Some(body) = f.body() { - // In contrast to function bodies, we want to see no ws before a semicolon. - // So let's remove them if we see any. - if let Some(prev) = body.syntax().prev_sibling_or_token() { - if prev.kind() == SyntaxKind::WHITESPACE { - ted::remove(prev); - } - } - - ted::replace(body.syntax(), make::tokens::semicolon()); + if let ast::AssocItem::Fn(f) = item + && let Some(body) = f.body() + { + // In contrast to function bodies, we want to see no ws before a semicolon. + // So let's remove them if we see any. + if let Some(prev) = body.syntax().prev_sibling_or_token() + && prev.kind() == SyntaxKind::WHITESPACE + { + ted::remove(prev); } + + ted::replace(body.syntax(), make::tokens::semicolon()); }; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index 1549b414dcc18..5367350052cbe 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -393,19 +393,17 @@ fn inline( // `FileReference` incorrect if let Some(imp) = sema.ancestors_with_macros(fn_body.syntax().clone()).find_map(ast::Impl::cast) + && !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) + && let Some(t) = imp.self_ty() { - if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) { - if let Some(t) = imp.self_ty() { - while let Some(self_tok) = body - .syntax() - .descendants_with_tokens() - .filter_map(NodeOrToken::into_token) - .find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) - { - let replace_with = t.clone_subtree().syntax().clone_for_update(); - ted::replace(self_tok, replace_with); - } - } + while let Some(self_tok) = body + .syntax() + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) + { + let replace_with = t.clone_subtree().syntax().clone_for_update(); + ted::replace(self_tok, replace_with); } } @@ -415,10 +413,10 @@ fn inline( for stmt in fn_body.statements() { if let Some(let_stmt) = ast::LetStmt::cast(stmt.syntax().to_owned()) { for has_token in let_stmt.syntax().children_with_tokens() { - if let Some(node) = has_token.as_node() { - if let Some(ident_pat) = ast::IdentPat::cast(node.to_owned()) { - func_let_vars.insert(ident_pat.syntax().text().to_string()); - } + if let Some(node) = has_token.as_node() + && let Some(ident_pat) = ast::IdentPat::cast(node.to_owned()) + { + func_let_vars.insert(ident_pat.syntax().text().to_string()); } } } @@ -534,16 +532,15 @@ fn inline( } } - if let Some(generic_arg_list) = generic_arg_list.clone() { - if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) - { - body.reindent_to(IndentLevel(0)); - if let Some(new_body) = ast::BlockExpr::cast( - PathTransform::function_call(target, source, function, generic_arg_list) - .apply(body.syntax()), - ) { - body = new_body; - } + if let Some(generic_arg_list) = generic_arg_list.clone() + && let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) + { + body.reindent_to(IndentLevel(0)); + if let Some(new_body) = ast::BlockExpr::cast( + PathTransform::function_call(target, source, function, generic_arg_list) + 
.apply(body.syntax()), + ) { + body = new_body; } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs index 0c1dc9eb9349f..a645c8b90afc4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -43,10 +43,10 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> let db = ctx.db(); let const_: ast::Const = ctx.find_node_at_offset()?; // Don't show the assist when the cursor is at the const's body. - if let Some(body) = const_.body() { - if body.syntax().text_range().contains(ctx.offset()) { - return None; - } + if let Some(body) = const_.body() + && body.syntax().text_range().contains(ctx.offset()) + { + return None; } let parent_fn = const_.syntax().ancestors().find_map(ast::Fn::cast)?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs index 1b0c313935376..21debf6745a67 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs @@ -62,10 +62,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> return None; }; - if let Some(parent) = tgt.syntax().parent() { - if matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT) { - return None; - } + if let Some(parent) = tgt.syntax().parent() + && matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT) + { + return None; } let target = tgt.syntax().text_range(); @@ -90,10 +90,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> let mut editor = SyntaxEditor::new(edit_tgt); for (stmt, rhs) in assignments { let mut stmt = stmt.syntax().clone(); - if let Some(parent) = stmt.parent() { - if ast::ExprStmt::cast(parent.clone()).is_some() { - stmt = parent.clone(); - } + if let Some(parent) = stmt.parent() + && ast::ExprStmt::cast(parent.clone()).is_some() + { + stmt = parent.clone(); } editor.replace(stmt, rhs.syntax()); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs index 94b49c5df0915..2cbb24a64fd5a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs @@ -80,15 +80,15 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> O // parse inside string to escape `"` let escaped = value.escape_default().to_string(); let suffix = string_suffix(token.text()).unwrap_or_default(); - if let Some(offsets) = token.quote_offsets() { - if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped { - let end_quote = offsets.quotes.1; - let end_quote = - TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix)); - edit.replace(offsets.quotes.0, "\""); - edit.replace(end_quote, "\""); - return; - } + if let Some(offsets) = token.quote_offsets() + && token.text()[offsets.contents - token.syntax().text_range().start()] == escaped + { + let end_quote = offsets.quotes.1; + let end_quote = + TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix)); + 
edit.replace(offsets.quotes.0, "\""); + edit.replace(end_quote, "\""); + return; } edit.replace(token.syntax().text_range(), format!("\"{escaped}\"{suffix}")); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index fa005a411d361..9f742131e5cb4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -102,10 +102,10 @@ pub(crate) fn replace_qualified_name_with_use( fn drop_generic_args(path: &ast::Path) -> ast::Path { let path = path.clone_for_update(); - if let Some(segment) = path.segment() { - if let Some(generic_args) = segment.generic_arg_list() { - ted::remove(generic_args.syntax()); - } + if let Some(segment) = path.segment() + && let Some(generic_args) = segment.generic_arg_list() + { + ted::remove(generic_args.syntax()); } path } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs index ac10a829bbf1b..b9385775b4765 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -41,10 +41,10 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O return None; } // Do nothing if the method is a member of trait. - if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) { - if impl_.trait_().is_some() { - return None; - } + if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) + && impl_.trait_().is_some() + { + return None; } // Remove the `async` keyword plus whitespace after it, if any. 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs index cf38262fbf443..eea6c85e8df0a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs @@ -72,20 +72,20 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> let mut exprs_to_unwrap = Vec::new(); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e); walk_expr(&body_expr, &mut |expr| { - if let ast::Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let ast::Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&body_expr, tail_cb); let is_unit_type = is_unit_type(&happy_type); if is_unit_type { - if let Some(NodeOrToken::Token(token)) = ret_type.syntax().next_sibling_or_token() { - if token.kind() == SyntaxKind::WHITESPACE { - editor.delete(token); - } + if let Some(NodeOrToken::Token(token)) = ret_type.syntax().next_sibling_or_token() + && token.kind() == SyntaxKind::WHITESPACE + { + editor.delete(token); } editor.delete(ret_type.syntax()); @@ -162,10 +162,10 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> } } - if let Some(cap) = ctx.config.snippet_cap { - if let Some(final_placeholder) = final_placeholder { - editor.add_annotation(final_placeholder.syntax(), builder.make_tabstop_after(cap)); - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(final_placeholder) = final_placeholder + { + editor.add_annotation(final_placeholder.syntax(), builder.make_tabstop_after(cap)); } editor.add_mappings(make.finish_with_mappings()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs index ecfecbb04ff22..46f3e85e12346 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs @@ -47,10 +47,10 @@ pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option if tuple_pat.fields().count() != tuple_init.fields().count() { return None; } - if let Some(tys) = &tuple_ty { - if tuple_pat.fields().count() != tys.fields().count() { - return None; - } + if let Some(tys) = &tuple_ty + && tuple_pat.fields().count() != tys.fields().count() + { + return None; } let parent = let_kw.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs index d7189aa5dbbde..0f089c9b66eb0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs @@ -101,24 +101,24 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let mut exprs_to_wrap = Vec::new(); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); walk_expr(&body_expr, &mut |expr| { - if let Expr::ReturnExpr(ret_expr) = expr { - if let Some(ret_expr_arg) = &ret_expr.expr() { - for_each_tail_expr(ret_expr_arg, tail_cb); - } + if let Expr::ReturnExpr(ret_expr) = expr + && let Some(ret_expr_arg) = &ret_expr.expr() + { + 
for_each_tail_expr(ret_expr_arg, tail_cb); } }); for_each_tail_expr(&body_expr, tail_cb); for ret_expr_arg in exprs_to_wrap { - if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg) { - if ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty) { - // The type is already correct, don't wrap it. - // We deliberately don't use `could_unify_with_deeply()`, because as long as the outer - // enum matches it's okay for us, as we don't trigger the assist if the return type - // is already `Option`/`Result`, so mismatched exact type is more likely a mistake - // than something intended. - continue; - } + if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg) + && ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty) + { + // The type is already correct, don't wrap it. + // We deliberately don't use `could_unify_with_deeply()`, because as long as the outer + // enum matches it's okay for us, as we don't trigger the assist if the return type + // is already `Option`/`Result`, so mismatched exact type is more likely a mistake + // than something intended. + continue; } let happy_wrapped = make.expr_call( @@ -147,13 +147,13 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ast::GenericArg::LifetimeArg(_) => false, _ => true, }); - if let Some(error_type_arg) = error_type_arg { - if let Some(cap) = ctx.config.snippet_cap { - editor.add_annotation( - error_type_arg.syntax(), - builder.make_placeholder_snippet(cap), - ); - } + if let Some(error_type_arg) = error_type_arg + && let Some(cap) = ctx.config.snippet_cap + { + editor.add_annotation( + error_type_arg.syntax(), + builder.make_placeholder_snippet(cap), + ); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs index 5183566d136b5..7d5740b748bef 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs @@ -200,13 +200,12 @@ fn wrap_derive( ], ); - if let Some(snippet_cap) = ctx.config.snippet_cap { - if let Some(first_meta) = + if let Some(snippet_cap) = ctx.config.snippet_cap + && let Some(first_meta) = cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token()) - { - let tabstop = edit.make_tabstop_after(snippet_cap); - editor.add_annotation(first_meta, tabstop); - } + { + let tabstop = edit.make_tabstop_after(snippet_cap); + editor.add_annotation(first_meta, tabstop); } editor.add_mappings(make.finish_with_mappings()); @@ -256,13 +255,12 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) -> editor.replace(attr.syntax(), cfg_attr.syntax()); - if let Some(snippet_cap) = ctx.config.snippet_cap { - if let Some(first_meta) = + if let Some(snippet_cap) = ctx.config.snippet_cap + && let Some(first_meta) = cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token()) - { - let tabstop = edit.make_tabstop_after(snippet_cap); - editor.add_annotation(first_meta, tabstop); - } + { + let tabstop = edit.make_tabstop_after(snippet_cap); + editor.add_annotation(first_meta, tabstop); } editor.add_mappings(make.finish_with_mappings()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 15c7a6a3fc266..77d471e5a7482 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -131,10 +131,10 @@ pub fn filter_assoc_items( if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent && assoc_item.attrs(sema.db).has_doc_hidden() { - if let hir::AssocItem::Function(f) = assoc_item { - if !f.has_body(sema.db) { - return true; - } + if let hir::AssocItem::Function(f) = assoc_item + && !f.has_body(sema.db) + { + return true; } return false; } @@ -514,10 +514,10 @@ pub(crate) fn find_struct_impl( if !(same_ty && not_trait_impl) { None } else { Some(impl_blk) } }); - if let Some(ref impl_blk) = block { - if has_any_fn(impl_blk, names) { - return None; - } + if let Some(ref impl_blk) = block + && has_any_fn(impl_blk, names) + { + return None; } Some(block) @@ -526,12 +526,11 @@ pub(crate) fn find_struct_impl( fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool { if let Some(il) = imp.assoc_item_list() { for item in il.assoc_items() { - if let ast::AssocItem::Fn(f) = item { - if let Some(name) = f.name() { - if names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) { - return true; - } - } + if let ast::AssocItem::Fn(f) = item + && let Some(name) = f.name() + && names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) + { + return true; } } } @@ -1021,12 +1020,12 @@ pub(crate) fn trimmed_text_range(source_file: &SourceFile, initial_range: TextRa pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgList { let mut args = vec![]; for param in list.params() { - if let Some(ast::Pat::IdentPat(pat)) = param.pat() { - if let Some(name) = pat.name() { - let name = name.to_string(); - let expr = make::expr_path(make::ext::ident_path(&name)); - args.push(expr); - } + if let Some(ast::Pat::IdentPat(pat)) = param.pat() + && let Some(name) = pat.name() + { + let name = name.to_string(); + let expr = make::expr_path(make::ext::ident_path(&name)); + args.push(expr); } } make::arg_list(args) @@ -1138,12 +1137,11 @@ pub fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bo }; match expr { ast::Expr::CallExpr(call) => { - if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() { - if let Some(PathResolution::Def(ModuleDef::Function(func))) = + if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() + && let Some(PathResolution::Def(ModuleDef::Function(func))) = path_expr.path().and_then(|path| sema.resolve_path(&path)) - { - is_const &= func.is_const(sema.db); - } + { + is_const &= func.is_const(sema.db); } } ast::Expr::MethodCallExpr(call) => { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 65072d936f635..11d26228ba201 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -111,10 +111,11 @@ impl Completions { ctx: &CompletionContext<'_>, super_chain_len: Option, ) { - if let Some(len) = super_chain_len { - if len > 0 && len < ctx.depth_from_crate_root { - self.add_keyword(ctx, "super::"); - } + if let Some(len) = super_chain_len + && len > 0 + && len < ctx.depth_from_crate_root + { + self.add_keyword(ctx, "super::"); } } @@ -643,10 +644,10 @@ fn enum_variants_with_paths( let variants = enum_.variants(ctx.db); - if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { - if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) { - variants.iter().for_each(|variant| process_variant(*variant)); - } + if let Some(impl_) = 
impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) + && impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) + { + variants.iter().for_each(|variant| process_variant(*variant)); } for variant in variants { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index 5340d65a142dd..f75123324f377 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -258,12 +258,11 @@ fn complete_methods( fn on_trait_method(&mut self, func: hir::Function) -> ControlFlow<()> { // This needs to come before the `seen_methods` test, so that if we see the same method twice, // once as inherent and once not, we will include it. - if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) { - if self.ctx.exclude_traits.contains(&trait_) - || trait_.complete(self.ctx.db) == Complete::IgnoreMethods - { - return ControlFlow::Continue(()); - } + if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) + && (self.ctx.exclude_traits.contains(&trait_) + || trait_.complete(self.ctx.db) == Complete::IgnoreMethods) + { + return ControlFlow::Continue(()); } if func.self_param(self.ctx.db).is_some() diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs index 809e71cc119e0..fb78386976d61 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs @@ -128,10 +128,10 @@ fn params_from_stmt_list_scope( { let module = scope.module().into(); scope.process_all_names(&mut |name, def| { - if let hir::ScopeDef::Local(local) = def { - if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module, true) { - cb(name, ty); - } + if let hir::ScopeDef::Local(local) = def + && let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module, true) + { + cb(name, ty); } }); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index bcf8c0ec527af..cdd77e79b5cd7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -228,24 +228,22 @@ fn add_function_impl_( .set_documentation(func.docs(ctx.db)) .set_relevance(CompletionRelevance { exact_name_match: true, ..Default::default() }); - if let Some(source) = ctx.sema.source(func) { - if let Some(transformed_fn) = + if let Some(source) = ctx.sema.source(func) + && let Some(transformed_fn) = get_transformed_fn(ctx, source.value, impl_def, async_sugaring) - { - let function_decl = - function_declaration(ctx, &transformed_fn, source.file_id.macro_file()); - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = format!("{function_decl} {{\n $0\n}}"); - item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); - } - None => { - let header = format!("{function_decl} {{"); - item.text_edit(TextEdit::replace(replacement_range, header)); - } - }; - item.add_to(acc, ctx.db); - } + { + let function_decl = function_declaration(ctx, &transformed_fn, source.file_id.macro_file()); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = format!("{function_decl} {{\n 
$0\n}}"); + item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); + } + None => { + let header = format!("{function_decl} {{"); + item.text_edit(TextEdit::replace(replacement_range, header)); + } + }; + item.add_to(acc, ctx.db); } } @@ -447,36 +445,36 @@ fn add_const_impl( ) { let const_name = const_.name(ctx.db).map(|n| n.display_no_db(ctx.edition).to_smolstr()); - if let Some(const_name) = const_name { - if let Some(source) = ctx.sema.source(const_) { - let assoc_item = ast::AssocItem::Const(source.value); - if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) { - let transformed_const = match transformed_item { - ast::AssocItem::Const(const_) => const_, - _ => unreachable!(), - }; - - let label = - make_const_compl_syntax(ctx, &transformed_const, source.file_id.macro_file()); - let replacement = format!("{label} "); - - let mut item = - CompletionItem::new(SymbolKind::Const, replacement_range, label, ctx.edition); - item.lookup_by(format_smolstr!("const {const_name}")) - .set_documentation(const_.docs(ctx.db)) - .set_relevance(CompletionRelevance { - exact_name_match: true, - ..Default::default() - }); - match ctx.config.snippet_cap { - Some(cap) => item.snippet_edit( - cap, - TextEdit::replace(replacement_range, format!("{replacement}$0;")), - ), - None => item.text_edit(TextEdit::replace(replacement_range, replacement)), - }; - item.add_to(acc, ctx.db); - } + if let Some(const_name) = const_name + && let Some(source) = ctx.sema.source(const_) + { + let assoc_item = ast::AssocItem::Const(source.value); + if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) { + let transformed_const = match transformed_item { + ast::AssocItem::Const(const_) => const_, + _ => unreachable!(), + }; + + let label = + make_const_compl_syntax(ctx, &transformed_const, source.file_id.macro_file()); + let replacement = format!("{label} "); + + let mut item = + CompletionItem::new(SymbolKind::Const, replacement_range, label, ctx.edition); + item.lookup_by(format_smolstr!("const {const_name}")) + .set_documentation(const_.docs(ctx.db)) + .set_relevance(CompletionRelevance { + exact_name_match: true, + ..Default::default() + }); + match ctx.config.snippet_cap { + Some(cap) => item.snippet_edit( + cap, + TextEdit::replace(replacement_range, format!("{replacement}$0;")), + ), + None => item.text_edit(TextEdit::replace(replacement_range, replacement)), + }; + item.add_to(acc, ctx.db); } } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index 013747e4d0cc7..3333300045773 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -26,18 +26,17 @@ pub(crate) fn complete_mod( let mut current_module = ctx.module; // For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're // interested in its parent. 
- if ctx.original_token.kind() == SyntaxKind::IDENT { - if let Some(module) = + if ctx.original_token.kind() == SyntaxKind::IDENT + && let Some(module) = ctx.original_token.parent_ancestors().nth(1).and_then(ast::Module::cast) - { - match ctx.sema.to_def(&module) { - Some(module) if module == current_module => { - if let Some(parent) = current_module.parent(ctx.db) { - current_module = parent; - } + { + match ctx.sema.to_def(&module) { + Some(module) if module == current_module => { + if let Some(parent) = current_module.parent(ctx.db) { + current_module = parent; } - _ => {} } + _ => {} } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs index 62fae1cb23746..815ce5145dbec 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs @@ -64,18 +64,17 @@ pub(crate) fn complete_pattern( if let Some(hir::Adt::Enum(e)) = ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) + && (refutable || single_variant_enum(e)) { - if refutable || single_variant_enum(e) { - super::enum_variants_with_paths( - acc, - ctx, - e, - &pattern_ctx.impl_, - |acc, ctx, variant, path| { - acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path); - }, - ); - } + super::enum_variants_with_paths( + acc, + ctx, + e, + &pattern_ctx.impl_, + |acc, ctx, variant, path| { + acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path); + }, + ); } // FIXME: ideally, we should look at the type we are matching against and diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index d0023852acf9f..0058611a61539 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -65,26 +65,19 @@ pub(crate) fn complete_postfix( let cfg = ctx.config.import_path_config(ctx.is_nightly); - if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { - if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) { - if let Some(drop_fn) = ctx.famous_defs().core_mem_drop() { - if let Some(path) = - ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg) - { - cov_mark::hit!(postfix_drop_completion); - let mut item = postfix_snippet( - "drop", - "fn drop(&mut self)", - &format!( - "{path}($0{receiver_text})", - path = path.display(ctx.db, ctx.edition) - ), - ); - item.set_documentation(drop_fn.docs(ctx.db)); - item.add_to(acc, ctx.db); - } - } - } + if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() + && receiver_ty.impls_trait(ctx.db, drop_trait, &[]) + && let Some(drop_fn) = ctx.famous_defs().core_mem_drop() + && let Some(path) = ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg) + { + cov_mark::hit!(postfix_drop_completion); + let mut item = postfix_snippet( + "drop", + "fn drop(&mut self)", + &format!("{path}($0{receiver_text})", path = path.display(ctx.db, ctx.edition)), + ); + item.set_documentation(drop_fn.docs(ctx.db)); + item.add_to(acc, ctx.db); } postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc, ctx.db); @@ -117,56 +110,50 @@ pub(crate) fn complete_postfix( let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references()); let mut is_in_cond = false; - if let Some(parent) = dot_receiver_including_refs.syntax().parent() { - 
if let Some(second_ancestor) = parent.parent() { - let sec_ancestor_kind = second_ancestor.kind(); - if let Some(expr) = >::cast(second_ancestor) { - is_in_cond = match expr { - Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent), - Either::Right(it) => { - it.condition().is_some_and(|cond| *cond.syntax() == parent) - } - } + if let Some(parent) = dot_receiver_including_refs.syntax().parent() + && let Some(second_ancestor) = parent.parent() + { + let sec_ancestor_kind = second_ancestor.kind(); + if let Some(expr) = >::cast(second_ancestor) { + is_in_cond = match expr { + Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent), + Either::Right(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent), } - match &try_enum { - Some(try_enum) if is_in_cond => match try_enum { - TryEnum::Result => { - postfix_snippet( - "let", - "let Ok(_)", - &format!("let Ok($0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - postfix_snippet( - "letm", - "let Ok(mut _)", - &format!("let Ok(mut $0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - } - TryEnum::Option => { - postfix_snippet( - "let", - "let Some(_)", - &format!("let Some($0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - postfix_snippet( - "letm", - "let Some(mut _)", - &format!("let Some(mut $0) = {receiver_text}"), - ) - .add_to(acc, ctx.db); - } - }, - _ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => { - postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")) - .add_to(acc, ctx.db); - postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};")) + } + match &try_enum { + Some(try_enum) if is_in_cond => match try_enum { + TryEnum::Result => { + postfix_snippet("let", "let Ok(_)", &format!("let Ok($0) = {receiver_text}")) .add_to(acc, ctx.db); + postfix_snippet( + "letm", + "let Ok(mut _)", + &format!("let Ok(mut $0) = {receiver_text}"), + ) + .add_to(acc, ctx.db); + } + TryEnum::Option => { + postfix_snippet( + "let", + "let Some(_)", + &format!("let Some($0) = {receiver_text}"), + ) + .add_to(acc, ctx.db); + postfix_snippet( + "letm", + "let Some(mut _)", + &format!("let Some(mut $0) = {receiver_text}"), + ) + .add_to(acc, ctx.db); } - _ => (), + }, + _ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => { + postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")) + .add_to(acc, ctx.db); + postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};")) + .add_to(acc, ctx.db); } + _ => (), } } @@ -258,25 +245,25 @@ pub(crate) fn complete_postfix( ) .add_to(acc, ctx.db); postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db); - } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() { - if receiver_ty.impls_trait(ctx.db, trait_, &[]) { - postfix_snippet( - "for", - "for ele in expr {}", - &format!("for ele in {receiver_text} {{\n $0\n}}"), - ) - .add_to(acc, ctx.db); - } + } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() + && receiver_ty.impls_trait(ctx.db, trait_, &[]) + { + postfix_snippet( + "for", + "for ele in expr {}", + &format!("for ele in {receiver_text} {{\n $0\n}}"), + ) + .add_to(acc, ctx.db); } } let mut block_should_be_wrapped = true; if dot_receiver.syntax().kind() == BLOCK_EXPR { block_should_be_wrapped = false; - if let Some(parent) = dot_receiver.syntax().parent() { - if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) { - block_should_be_wrapped = true; - } + if let Some(parent) = 
dot_receiver.syntax().parent() + && matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) + { + block_should_be_wrapped = true; } }; { @@ -292,10 +279,10 @@ pub(crate) fn complete_postfix( postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db); } - if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() { - if let Some(literal_text) = ast::String::cast(literal.token()) { - add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text); - } + if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() + && let Some(literal_text) = ast::String::cast(literal.token()) + { + add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text); } postfix_snippet( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs index d2ab193ec3dfa..f39b641649326 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs @@ -54,12 +54,10 @@ pub(crate) fn complete_use_path( for (name, def) in module_scope { if let (Some(attrs), Some(defining_crate)) = (def.attrs(ctx.db), def.krate(ctx.db)) + && (!ctx.check_stability(Some(&attrs)) + || ctx.is_doc_hidden(&attrs, defining_crate)) { - if !ctx.check_stability(Some(&attrs)) - || ctx.is_doc_hidden(&attrs, defining_crate) - { - continue; - } + continue; } let is_name_already_imported = already_imported_names.contains(name.as_str()); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs index 38761f77a2c5f..28d906d91ce5a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs @@ -20,11 +20,11 @@ pub(crate) fn complete_vis_path( // Try completing next child module of the path that is still a parent of the current module let next_towards_current = ctx.module.path_to_root(ctx.db).into_iter().take_while(|it| it != module).last(); - if let Some(next) = next_towards_current { - if let Some(name) = next.name(ctx.db) { - cov_mark::hit!(visibility_qualified); - acc.add_module(ctx, path_ctx, next, name, vec![]); - } + if let Some(next) = next_towards_current + && let Some(name) = next.name(ctx.db) + { + cov_mark::hit!(visibility_qualified); + acc.add_module(ctx, path_ctx, next, name, vec![]); } acc.add_super_keyword(ctx, *super_chain_len); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index ea5fb39338b2e..2eabf99fc697e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -287,24 +287,22 @@ fn expand( &spec_attr, fake_ident_token.clone(), ), - ) { - if let Some((fake_mapped_token, _)) = - fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank) - { - return Some(ExpansionResult { - original_file: original_file.value, - speculative_file, - original_offset, - speculative_offset: fake_ident_token.text_range().start(), - fake_ident_token, - derive_ctx: Some(( - actual_expansion, - fake_expansion, - fake_mapped_token.text_range().start(), - orig_attr, - )), - }); - } + ) && let Some((fake_mapped_token, _)) = + fake_mapped_tokens.into_iter().min_by_key(|(_, 
rank)| *rank) + { + return Some(ExpansionResult { + original_file: original_file.value, + speculative_file, + original_offset, + speculative_offset: fake_ident_token.text_range().start(), + fake_ident_token, + derive_ctx: Some(( + actual_expansion, + fake_expansion, + fake_mapped_token.text_range().start(), + orig_attr, + )), + }); } if let Some(spec_adt) = @@ -535,14 +533,13 @@ fn analyze<'db>( NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..), .. } = &nameref_ctx + && is_in_token_of_for_loop(path) { - if is_in_token_of_for_loop(path) { - // for pat $0 - // there is nothing to complete here except `in` keyword - // don't bother populating the context - // Ideally this special casing wouldn't be needed, but the parser recovers - return None; - } + // for pat $0 + // there is nothing to complete here except `in` keyword + // don't bother populating the context + // Ideally this special casing wouldn't be needed, but the parser recovers + return None; } qual_ctx = qualifier_ctx; @@ -951,29 +948,26 @@ fn classify_name_ref<'db>( let inbetween_body_and_decl_check = |node: SyntaxNode| { if let Some(NodeOrToken::Node(n)) = syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev) + && let Some(item) = ast::Item::cast(n) { - if let Some(item) = ast::Item::cast(n) { - let is_inbetween = match &item { - ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(), - ast::Item::Enum(it) => it.variant_list().is_none(), - ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), - ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(), - ast::Item::Impl(it) => it.assoc_item_list().is_none(), - ast::Item::Module(it) => { - it.item_list().is_none() && it.semicolon_token().is_none() - } - ast::Item::Static(it) => it.body().is_none(), - ast::Item::Struct(it) => { - it.field_list().is_none() && it.semicolon_token().is_none() - } - ast::Item::Trait(it) => it.assoc_item_list().is_none(), - ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(), - ast::Item::Union(it) => it.record_field_list().is_none(), - _ => false, - }; - if is_inbetween { - return Some(item); + let is_inbetween = match &item { + ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(), + ast::Item::Enum(it) => it.variant_list().is_none(), + ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), + ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(), + ast::Item::Impl(it) => it.assoc_item_list().is_none(), + ast::Item::Module(it) => it.item_list().is_none() && it.semicolon_token().is_none(), + ast::Item::Static(it) => it.body().is_none(), + ast::Item::Struct(it) => { + it.field_list().is_none() && it.semicolon_token().is_none() } + ast::Item::Trait(it) => it.assoc_item_list().is_none(), + ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(), + ast::Item::Union(it) => it.record_field_list().is_none(), + _ => false, + }; + if is_inbetween { + return Some(item); } } None @@ -1502,10 +1496,10 @@ fn classify_name_ref<'db>( } }; } - } else if let Some(segment) = path.segment() { - if segment.coloncolon_token().is_some() { - path_ctx.qualified = Qualified::Absolute; - } + } else if let Some(segment) = path.segment() + && segment.coloncolon_token().is_some() + { + path_ctx.qualified = Qualified::Absolute; } let mut qualifier_ctx = QualifierCtx::default(); @@ -1530,38 +1524,30 @@ fn classify_name_ref<'db>( if let Some(top) = top_node { if let 
Some(NodeOrToken::Node(error_node)) = syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev) + && error_node.kind() == SyntaxKind::ERROR { - if error_node.kind() == SyntaxKind::ERROR { - for token in - error_node.children_with_tokens().filter_map(NodeOrToken::into_token) - { - match token.kind() { - SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token), - SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token), - SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token), - _ => {} - } + for token in error_node.children_with_tokens().filter_map(NodeOrToken::into_token) { + match token.kind() { + SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token), + SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token), + SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token), + _ => {} } - qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); } + qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); } - if let PathKind::Item { .. } = path_ctx.kind { - if qualifier_ctx.none() { - if let Some(t) = top.first_token() { - if let Some(prev) = t - .prev_token() - .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) - { - if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) { - // This was inferred to be an item position path, but it seems - // to be part of some other broken node which leaked into an item - // list - return None; - } - } - } - } + if let PathKind::Item { .. } = path_ctx.kind + && qualifier_ctx.none() + && let Some(t) = top.first_token() + && let Some(prev) = + t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) + && ![T![;], T!['}'], T!['{']].contains(&prev.kind()) + { + // This was inferred to be an item position path, but it seems + // to be part of some other broken node which leaked into an item + // list + return None; } } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index dcaac3997b275..f27cd07816657 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -636,10 +636,10 @@ impl Builder { } pub(crate) fn set_detail(&mut self, detail: Option>) -> &mut Builder { self.detail = detail.map(Into::into); - if let Some(detail) = &self.detail { - if never!(detail.contains('\n'), "multiline detail:\n{}", detail) { - self.detail = Some(detail.split('\n').next().unwrap().to_owned()); - } + if let Some(detail) = &self.detail + && never!(detail.contains('\n'), "multiline detail:\n{}", detail) + { + self.detail = Some(detail.split('\n').next().unwrap().to_owned()); } self } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 1fdd4cdb1c6bb..a70a1138d2f42 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -208,9 +208,9 @@ pub fn completions( // when the user types a bare `_` (that is it does not belong to an identifier) // the user might just wanted to type a `_` for type inference or pattern discarding // so try to suppress completions in those cases - if trigger_character == Some('_') && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE - { - if let CompletionAnalysis::NameRef(NameRefContext { + if trigger_character == Some('_') + && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE + && let 
CompletionAnalysis::NameRef(NameRefContext { kind: NameRefKind::Path( path_ctx @ PathCompletionCtx { @@ -220,11 +220,9 @@ pub fn completions( ), .. }) = analysis - { - if path_ctx.is_trivial_path() { - return None; - } - } + && path_ctx.is_trivial_path() + { + return None; } { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index c6b8af3c79a2d..3d7a4067c2cd0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -164,19 +164,18 @@ pub(crate) fn render_field( let expected_fn_type = ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure()); - if !expected_fn_type { - if let Some(receiver) = &dot_access.receiver { - if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) { - builder.insert(receiver.syntax().text_range().start(), "(".to_owned()); - builder.insert(ctx.source_range().end(), ")".to_owned()); - - let is_parens_needed = - !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); - - if is_parens_needed { - builder.insert(ctx.source_range().end(), "()".to_owned()); - } - } + if !expected_fn_type + && let Some(receiver) = &dot_access.receiver + && let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) + { + builder.insert(receiver.syntax().text_range().start(), "(".to_owned()); + builder.insert(ctx.source_range().end(), ")".to_owned()); + + let is_parens_needed = + !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); + + if is_parens_needed { + builder.insert(ctx.source_range().end(), "()".to_owned()); } } @@ -184,12 +183,11 @@ pub(crate) fn render_field( } else { item.insert_text(field_with_receiver(receiver.as_deref(), &escaped_name)); } - if let Some(receiver) = &dot_access.receiver { - if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { - if let Some(ref_mode) = compute_ref_match(ctx.completion, ty) { - item.ref_match(ref_mode, original.syntax().text_range().start()); - } - } + if let Some(receiver) = &dot_access.receiver + && let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) + && let Some(ref_mode) = compute_ref_match(ctx.completion, ty) + { + item.ref_match(ref_mode, original.syntax().text_range().start()); } item.doc_aliases(ctx.doc_aliases); item.build(db) @@ -437,26 +435,21 @@ fn render_resolution_path( path_ctx, PathCompletionCtx { kind: PathKind::Type { .. }, has_type_args: false, .. 
} ) && config.callable.is_some(); - if type_path_no_ty_args { - if let Some(cap) = cap { - let has_non_default_type_params = match resolution { - ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db), - ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => { - it.has_non_default_type_params(db) - } - _ => false, - }; - - if has_non_default_type_params { - cov_mark::hit!(inserts_angle_brackets_for_generics); - item.lookup_by(name.clone()) - .label(SmolStr::from_iter([&name, "<…>"])) - .trigger_call_info() - .insert_snippet( - cap, - format!("{}<$0>", local_name.display(db, completion.edition)), - ); + if type_path_no_ty_args && let Some(cap) = cap { + let has_non_default_type_params = match resolution { + ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db), + ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => { + it.has_non_default_type_params(db) } + _ => false, + }; + + if has_non_default_type_params { + cov_mark::hit!(inserts_angle_brackets_for_generics); + item.lookup_by(name.clone()) + .label(SmolStr::from_iter([&name, "<…>"])) + .trigger_call_info() + .insert_snippet(cap, format!("{}<$0>", local_name.display(db, completion.edition))); } } @@ -634,23 +627,24 @@ fn compute_ref_match( if expected_type.could_unify_with(ctx.db, completion_ty) { return None; } - if let Some(expected_without_ref) = &expected_without_ref { - if completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) { - cov_mark::hit!(suggest_ref); - let mutability = if expected_type.is_mutable_reference() { - hir::Mutability::Mut - } else { - hir::Mutability::Shared - }; - return Some(CompletionItemRefMode::Reference(mutability)); - } + if let Some(expected_without_ref) = &expected_without_ref + && completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) + { + cov_mark::hit!(suggest_ref); + let mutability = if expected_type.is_mutable_reference() { + hir::Mutability::Mut + } else { + hir::Mutability::Shared + }; + return Some(CompletionItemRefMode::Reference(mutability)); } - if let Some(completion_without_ref) = completion_without_ref { - if completion_without_ref == *expected_type && completion_without_ref.is_copy(ctx.db) { - cov_mark::hit!(suggest_deref); - return Some(CompletionItemRefMode::Dereference); - } + if let Some(completion_without_ref) = completion_without_ref + && completion_without_ref == *expected_type + && completion_without_ref.is_copy(ctx.db) + { + cov_mark::hit!(suggest_deref); + return Some(CompletionItemRefMode::Dereference); } None @@ -664,10 +658,10 @@ fn path_ref_match( ) { if let Some(original_path) = &path_ctx.original_path { // At least one char was typed by the user already, in that case look for the original path - if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) { - if let Some(ref_mode) = compute_ref_match(completion, ty) { - item.ref_match(ref_mode, original_path.syntax().text_range().start()); - } + if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) + && let Some(ref_mode) = compute_ref_match(completion, ty) + { + item.ref_match(ref_mode, original_path.syntax().text_range().start()); } } else { // completion requested on an empty identifier, there is no path here yet. 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs index f11b3023679ac..707a8aed4fb9e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs @@ -25,10 +25,10 @@ fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option .detail(detail) .set_relevance(ctx.completion_relevance()); - if let Some(actm) = const_.as_assoc_item(db) { - if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); - } + if let Some(actm) = const_.as_assoc_item(db) + && let Some(trt) = actm.container_or_implemented_trait(db) + { + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 7669aec8f535c..c466019f991f7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -132,10 +132,10 @@ fn render( super::path_ref_match(completion, path_ctx, &ret_type, &mut item); } FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => { - if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) { - if let Some(ref_mode) = compute_ref_match(completion, &ret_type) { - item.ref_match(ref_mode, original_expr.syntax().text_range().start()); - } + if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) + && let Some(ref_mode) = compute_ref_match(completion, &ret_type) + { + item.ref_match(ref_mode, original_expr.syntax().text_range().start()); } } _ => (), @@ -169,12 +169,10 @@ fn render( item.add_import(import_to_add); } None => { - if let Some(actm) = assoc_item { - if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name( - trt.name(db).display_no_db(ctx.completion.edition).to_smolstr(), - ); - } + if let Some(actm) = assoc_item + && let Some(trt) = actm.container_or_implemented_trait(db) + { + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } } } @@ -378,15 +376,13 @@ fn params<'db>( ctx.config.callable.as_ref()?; // Don't add parentheses if the expected type is a function reference with the same signature. 
- if let Some(expected) = ctx.expected_type.as_ref().filter(|e| e.is_fn()) { - if let Some(expected) = expected.as_callable(ctx.db) { - if let Some(completed) = func.ty(ctx.db).as_callable(ctx.db) { - if expected.sig() == completed.sig() { - cov_mark::hit!(no_call_parens_if_fn_ptr_needed); - return None; - } - } - } + if let Some(expected) = ctx.expected_type.as_ref().filter(|e| e.is_fn()) + && let Some(expected) = expected.as_callable(ctx.db) + && let Some(completed) = func.ty(ctx.db).as_callable(ctx.db) + && expected.sig() == completed.sig() + { + cov_mark::hit!(no_call_parens_if_fn_ptr_needed); + return None; } let self_param = if has_dot_receiver || matches!(func_kind, FuncKind::Method(_, Some(_))) { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs index d57feee4fa65e..3fc0f369e5ada 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs @@ -51,10 +51,10 @@ fn render( .detail(detail) .set_relevance(ctx.completion_relevance()); - if let Some(actm) = type_alias.as_assoc_item(db) { - if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); - } + if let Some(actm) = type_alias.as_assoc_item(db) + && let Some(trt) = actm.container_or_implemented_trait(db) + { + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index a4a140ec57aa0..2a4fcf6a2e5f7 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -610,18 +610,16 @@ impl<'db> NameClass<'db> { let local = sema.to_def(&ident_pat)?; let pat_parent = ident_pat.syntax().parent(); - if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) { - if record_pat_field.name_ref().is_none() { - if let Some((field, _, adt_subst)) = - sema.resolve_record_pat_field_with_subst(&record_pat_field) - { - return Some(NameClass::PatFieldShorthand { - local_def: local, - field_ref: field, - adt_subst, - }); - } - } + if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) + && record_pat_field.name_ref().is_none() + && let Some((field, _, adt_subst)) = + sema.resolve_record_pat_field_with_subst(&record_pat_field) + { + return Some(NameClass::PatFieldShorthand { + local_def: local, + field_ref: field, + adt_subst, + }); } Some(NameClass::Definition(Definition::Local(local))) } @@ -755,30 +753,27 @@ impl<'db> NameRefClass<'db> { let parent = name_ref.syntax().parent()?; - if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) { - if let Some((field, local, _, adt_subst)) = + if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) + && let Some((field, local, _, adt_subst)) = sema.resolve_record_field_with_substitution(&record_field) - { - let res = match local { - None => NameRefClass::Definition(Definition::Field(field), Some(adt_subst)), - Some(local) => NameRefClass::FieldShorthand { - field_ref: field, - local_ref: local, - adt_subst, - }, - }; - return Some(res); - } + { + let res = match local { + None => NameRefClass::Definition(Definition::Field(field), Some(adt_subst)), + Some(local) => { + NameRefClass::FieldShorthand { field_ref: 
field, local_ref: local, adt_subst } + } + }; + return Some(res); } if let Some(path) = ast::PathSegment::cast(parent.clone()).map(|it| it.parent_path()) { - if path.parent_path().is_none() { - if let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - // Only use this to resolve to macro calls for last segments as qualifiers resolve - // to modules below. - if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { - return Some(NameRefClass::Definition(Definition::Macro(macro_def), None)); - } + if path.parent_path().is_none() + && let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) + { + // Only use this to resolve to macro calls for last segments as qualifiers resolve + // to modules below. + if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { + return Some(NameRefClass::Definition(Definition::Macro(macro_def), None)); } } return sema @@ -820,8 +815,8 @@ impl<'db> NameRefClass<'db> { // ^^^^^ let containing_path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; let resolved = sema.resolve_path(&containing_path)?; - if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved { - if let Some(ty) = tr + if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved + && let Some(ty) = tr .items_with_supertraits(sema.db) .iter() .filter_map(|&assoc| match assoc { @@ -833,7 +828,6 @@ impl<'db> NameRefClass<'db> { // No substitution, this can only occur in type position. return Some(NameRefClass::Definition(Definition::TypeAlias(ty), None)); } - } None }, ast::UseBoundGenericArgs(_) => { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index 340429037e67a..1e54058dd16ca 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -70,11 +70,11 @@ pub fn visit_file_defs( }; let mut defs: VecDeque<_> = module.declarations(db).into(); while let Some(def) = defs.pop_front() { - if let ModuleDef::Module(submodule) = def { - if submodule.is_inline(db) { - defs.extend(submodule.declarations(db)); - submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into())); - } + if let ModuleDef::Module(submodule) = def + && submodule.is_inline(db) + { + defs.extend(submodule.declarations(db)); + submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into())); } cb(def.into()); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs index 813f38380f69d..08cd8f28608ca 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs @@ -97,12 +97,11 @@ impl ImportScope { .map(ImportScopeKind::Module) .map(|kind| ImportScope { kind, required_cfgs }); } else if let Some(has_attrs) = ast::AnyHasAttrs::cast(syntax) { - if block.is_none() { - if let Some(b) = ast::BlockExpr::cast(has_attrs.syntax().clone()) { - if let Some(b) = sema.original_ast_node(b) { - block = b.stmt_list(); - } - } + if block.is_none() + && let Some(b) = ast::BlockExpr::cast(has_attrs.syntax().clone()) + && let Some(b) = sema.original_ast_node(b) + { + block = b.stmt_list(); } if has_attrs .attrs() @@ -349,26 +348,24 @@ fn guess_granularity_from_scope(scope: &ImportScope) -> ImportGranularityGuess { seen_one_style_groups.push((curr_vis.clone(), curr_attrs.clone())); } else if eq_visibility(prev_vis, curr_vis.clone()) && eq_attrs(prev_attrs, curr_attrs.clone()) 
+ && let Some((prev_path, curr_path)) = prev.path().zip(curr.path()) + && let Some((prev_prefix, _)) = common_prefix(&prev_path, &curr_path) { - if let Some((prev_path, curr_path)) = prev.path().zip(curr.path()) { - if let Some((prev_prefix, _)) = common_prefix(&prev_path, &curr_path) { - if prev.use_tree_list().is_none() && curr.use_tree_list().is_none() { - let prefix_c = prev_prefix.qualifiers().count(); - let curr_c = curr_path.qualifiers().count() - prefix_c; - let prev_c = prev_path.qualifiers().count() - prefix_c; - if curr_c == 1 && prev_c == 1 { - // Same prefix, only differing in the last segment and no use tree lists so this has to be of item style. - break ImportGranularityGuess::Item; - } else { - // Same prefix and no use tree list but differs in more than one segment at the end. This might be module style still. - res = ImportGranularityGuess::ModuleOrItem; - } - } else { - // Same prefix with item tree lists, has to be module style as it - // can't be crate style since the trees wouldn't share a prefix then. - break ImportGranularityGuess::Module; - } + if prev.use_tree_list().is_none() && curr.use_tree_list().is_none() { + let prefix_c = prev_prefix.qualifiers().count(); + let curr_c = curr_path.qualifiers().count() - prefix_c; + let prev_c = prev_path.qualifiers().count() - prefix_c; + if curr_c == 1 && prev_c == 1 { + // Same prefix, only differing in the last segment and no use tree lists so this has to be of item style. + break ImportGranularityGuess::Item; + } else { + // Same prefix and no use tree list but differs in more than one segment at the end. This might be module style still. + res = ImportGranularityGuess::ModuleOrItem; } + } else { + // Same prefix with item tree lists, has to be module style as it + // can't be crate style since the trees wouldn't share a prefix then. + break ImportGranularityGuess::Module; } } prev = curr; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index b7432d89c7b77..5d88afec50951 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -193,13 +193,12 @@ impl<'a> PathTransform<'a> { } } (Either::Right(k), None) => { - if let Some(default) = k.default(db) { - if let Some(default) = + if let Some(default) = k.default(db) + && let Some(default) = &default.display_source_code(db, source_module.into(), false).ok() - { - type_substs.insert(k, make::ty(default).clone_for_update()); - defaulted_params.push(Either::Left(k)); - } + { + type_substs.insert(k, make::ty(default).clone_for_update()); + defaulted_params.push(Either::Left(k)); } } (Either::Left(k), Some(TypeOrConst::Either(v))) => { @@ -221,11 +220,10 @@ impl<'a> PathTransform<'a> { (Either::Left(k), None) => { if let Some(default) = k.default(db, target_module.krate().to_display_target(db)) + && let Some(default) = default.expr() { - if let Some(default) = default.expr() { - const_substs.insert(k, default.syntax().clone_for_update()); - defaulted_params.push(Either::Right(k)); - } + const_substs.insert(k, default.syntax().clone_for_update()); + defaulted_params.push(Either::Right(k)); } } _ => (), // ignore mismatching params @@ -427,14 +425,14 @@ impl Ctx<'_> { } } hir::PathResolution::Def(def) if def.as_assoc_item(self.source_scope.db).is_none() => { - if let hir::ModuleDef::Trait(_) = def { - if matches!(path.segment()?.kind()?, ast::PathSegmentKind::Type { .. 
}) { - // `speculative_resolve` resolves segments like `` into `Trait`, but just the trait name should - // not be used as the replacement of the original - // segment. - return None; - } + if let hir::ModuleDef::Trait(_) = def + && matches!(path.segment()?.kind()?, ast::PathSegmentKind::Type { .. }) + { + // `speculative_resolve` resolves segments like `` into `Trait`, but just the trait name should + // not be used as the replacement of the original + // segment. + return None; } let cfg = ImportPathConfig { @@ -446,19 +444,17 @@ impl Ctx<'_> { let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?; let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update(); let mut res_editor = SyntaxEditor::new(res.syntax().clone_subtree()); - if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { - if let Some(segment) = res.segment() { - if let Some(old) = segment.generic_arg_list() { - res_editor.replace( - old.syntax(), - args.clone_subtree().syntax().clone_for_update(), - ) - } else { - res_editor.insert( - syntax_editor::Position::last_child_of(segment.syntax()), - args.clone_subtree().syntax().clone_for_update(), - ); - } + if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) + && let Some(segment) = res.segment() + { + if let Some(old) = segment.generic_arg_list() { + res_editor + .replace(old.syntax(), args.clone_subtree().syntax().clone_for_update()) + } else { + res_editor.insert( + syntax_editor::Position::last_child_of(segment.syntax()), + args.clone_subtree().syntax().clone_for_update(), + ); } } let res = res_editor.finish().new_root().clone(); @@ -485,27 +481,27 @@ impl Ctx<'_> { .ok()?; let ast_ty = make::ty(ty_str).clone_for_update(); - if let Some(adt) = ty.as_adt() { - if let ast::Type::PathType(path_ty) = &ast_ty { - let cfg = ImportPathConfig { - prefer_no_std: false, - prefer_prelude: true, - prefer_absolute: false, - allow_unstable: true, - }; - let found_path = self.target_module.find_path( - self.source_scope.db, - ModuleDef::from(adt), - cfg, - )?; - - if let Some(qual) = - mod_path_to_ast(&found_path, self.target_edition).qualifier() - { - let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); - editor.replace(path.syntax(), res.syntax()); - return Some(()); - } + if let Some(adt) = ty.as_adt() + && let ast::Type::PathType(path_ty) = &ast_ty + { + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + allow_unstable: true, + }; + let found_path = self.target_module.find_path( + self.source_scope.db, + ModuleDef::from(adt), + cfg, + )?; + + if let Some(qual) = + mod_path_to_ast(&found_path, self.target_edition).qualifier() + { + let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); + editor.replace(path.syntax(), res.syntax()); + return Some(()); } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index 4e737e27f0505..424b27a398b20 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -442,17 +442,17 @@ fn source_edit_from_name( name: &ast::Name, new_name: &dyn Display, ) -> bool { - if ast::RecordPatField::for_field_name(name).is_some() { - if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) { - cov_mark::hit!(rename_record_pat_field_name_split); - // Foo { ref mut field } -> Foo { new_name: ref mut field } - // ^ insert `new_name: ` - - // FIXME: 
instead of splitting the shorthand, recursively trigger a rename of the - // other name https://github.com/rust-lang/rust-analyzer/issues/6547 - edit.insert(ident_pat.syntax().text_range().start(), format!("{new_name}: ")); - return true; - } + if ast::RecordPatField::for_field_name(name).is_some() + && let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) + { + cov_mark::hit!(rename_record_pat_field_name_split); + // Foo { ref mut field } -> Foo { new_name: ref mut field } + // ^ insert `new_name: ` + + // FIXME: instead of splitting the shorthand, recursively trigger a rename of the + // other name https://github.com/rust-lang/rust-analyzer/issues/6547 + edit.insert(ident_pat.syntax().text_range().start(), format!("{new_name}: ")); + return true; } false diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 9cf0bcf919011..4dd64229d2748 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -295,10 +295,10 @@ impl Definition { } // def is crate root - if let &Definition::Module(module) = self { - if module.is_crate_root() { - return SearchScope::reverse_dependencies(db, module.krate()); - } + if let &Definition::Module(module) = self + && module.is_crate_root() + { + return SearchScope::reverse_dependencies(db, module.krate()); } let module = match self.module(db) { @@ -683,51 +683,47 @@ impl<'a> FindUsages<'a> { } } else if let Some(alias) = usage.ancestors().find_map(ast::TypeAlias::cast) + && let Some(name) = alias.name() + && seen + .insert(InFileWrapper::new(file_id, name.syntax().text_range())) { - if let Some(name) = alias.name() { - if seen.insert(InFileWrapper::new( - file_id, - name.syntax().text_range(), - )) { - if let Some(def) = is_alias(&alias) { - cov_mark::hit!(container_type_alias); - insert_type_alias( - sema.db, - &mut to_process, - name.text().as_str(), - def.into(), - ); - } else { - cov_mark::hit!(same_name_different_def_type_alias); - } - } + if let Some(def) = is_alias(&alias) { + cov_mark::hit!(container_type_alias); + insert_type_alias( + sema.db, + &mut to_process, + name.text().as_str(), + def.into(), + ); + } else { + cov_mark::hit!(same_name_different_def_type_alias); } } // We need to account for `Self`. It can only refer to our type inside an impl. let impl_ = 'impl_: { for ancestor in usage.ancestors() { - if let Some(parent) = ancestor.parent() { - if let Some(parent) = ast::Impl::cast(parent) { - // Only if the GENERIC_PARAM_LIST is directly under impl, otherwise it may be in the self ty. - if matches!( - ancestor.kind(), - SyntaxKind::ASSOC_ITEM_LIST - | SyntaxKind::WHERE_CLAUSE - | SyntaxKind::GENERIC_PARAM_LIST - ) { - break; - } - if parent - .trait_() - .is_some_and(|trait_| *trait_.syntax() == ancestor) - { - break; - } - - // Otherwise, found an impl where its self ty may be our type. - break 'impl_ Some(parent); + if let Some(parent) = ancestor.parent() + && let Some(parent) = ast::Impl::cast(parent) + { + // Only if the GENERIC_PARAM_LIST is directly under impl, otherwise it may be in the self ty. + if matches!( + ancestor.kind(), + SyntaxKind::ASSOC_ITEM_LIST + | SyntaxKind::WHERE_CLAUSE + | SyntaxKind::GENERIC_PARAM_LIST + ) { + break; + } + if parent + .trait_() + .is_some_and(|trait_| *trait_.syntax() == ancestor) + { + break; } + + // Otherwise, found an impl where its self ty may be our type. 
+ break 'impl_ Some(parent); } } None @@ -1356,11 +1352,10 @@ impl ReferenceCategory { if matches!(expr.op_kind()?, ast::BinaryOp::Assignment { .. }) { // If the variable or field ends on the LHS's end then it's a Write // (covers fields and locals). FIXME: This is not terribly accurate. - if let Some(lhs) = expr.lhs() { - if lhs.syntax().text_range().end() == r.syntax().text_range().end() { + if let Some(lhs) = expr.lhs() + && lhs.syntax().text_range().end() == r.syntax().text_range().end() { return Some(ReferenceCategory::WRITE) } - } } Some(ReferenceCategory::READ) }, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index c15cade84a502..9c4e6f5cbf82f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -252,10 +252,10 @@ impl SymbolIndex { let mut last_batch_start = 0; for idx in 0..symbols.len() { - if let Some(next_symbol) = symbols.get(idx + 1) { - if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal { - continue; - } + if let Some(next_symbol) = symbols.get(idx + 1) + && cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal + { + continue; } let start = last_batch_start; @@ -371,10 +371,10 @@ impl Query { if self.exclude_imports && symbol.is_import { continue; } - if self.mode.check(&self.query, self.case_sensitive, symbol_name) { - if let Some(b) = cb(symbol).break_value() { - return Some(b); - } + if self.mode.check(&self.query, self.case_sensitive, symbol_name) + && let Some(b) = cb(symbol).break_value() + { + return Some(b); } } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs index 7e8c921d9ed39..1d4d8decf5413 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs @@ -230,11 +230,11 @@ pub fn lex_format_specifiers( skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback); } continue; - } else if let '}' = first_char { - if let Some((_, '}')) = chars.peek() { - // Escaped format specifier, `}}` - read_escaped_format_specifier(&mut chars, &mut callback); - } + } else if let '}' = first_char + && let Some((_, '}')) = chars.peek() + { + // Escaped format specifier, `}}` + read_escaped_format_specifier(&mut chars, &mut callback); } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index bdff64dd0812c..cefd8fd49676e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -79,14 +79,13 @@ pub fn preorder_expr_with_ctx_checker( continue; } }; - if let Some(let_stmt) = node.parent().and_then(ast::LetStmt::cast) { - if let_stmt.initializer().map(|it| it.syntax() != &node).unwrap_or(true) - && let_stmt.let_else().map(|it| it.syntax() != &node).unwrap_or(true) - { - // skipping potential const pat expressions in let statements - preorder.skip_subtree(); - continue; - } + if let Some(let_stmt) = node.parent().and_then(ast::LetStmt::cast) + && let_stmt.initializer().map(|it| it.syntax() != &node).unwrap_or(true) + && let_stmt.let_else().map(|it| it.syntax() != &node).unwrap_or(true) + { + // skipping potential const pat expressions in let statements + 
preorder.skip_subtree(); + continue; } match ast::Stmt::cast(node.clone()) { @@ -306,10 +305,10 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { Some(ast::BlockModifier::AsyncGen(_)) => (), None => (), } - if let Some(stmt_list) = b.stmt_list() { - if let Some(e) = stmt_list.tail_expr() { - for_each_tail_expr(&e, cb); - } + if let Some(stmt_list) = b.stmt_list() + && let Some(e) = stmt_list.tail_expr() + { + for_each_tail_expr(&e, cb); } } ast::Expr::IfExpr(if_) => { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs index f63cd92694b35..a91d436afcfbb 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs @@ -16,17 +16,17 @@ pub fn use_trivial_constructor( ) -> Option { match ty.as_adt() { Some(hir::Adt::Enum(x)) => { - if let &[variant] = &*x.variants(db) { - if variant.kind(db) == hir::StructKind::Unit { - let path = make::path_qualified( - path, - make::path_segment(make::name_ref( - &variant.name(db).display_no_db(edition).to_smolstr(), - )), - ); + if let &[variant] = &*x.variants(db) + && variant.kind(db) == hir::StructKind::Unit + { + let path = make::path_qualified( + path, + make::path_segment(make::name_ref( + &variant.name(db).display_no_db(edition).to_smolstr(), + )), + ); - return Some(make::expr_path(path)); - } + return Some(make::expr_path(path)); } } Some(hir::Adt::Struct(x)) if x.kind(db) == StructKind::Unit => { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index bf7dddacd8c59..742d614bc5673 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -148,37 +148,27 @@ pub(crate) fn json_in_items( allow_unstable: true, }; - if !scope_has("Serialize") { - if let Some(PathResolution::Def(it)) = serialize_resolved { - if let Some(it) = current_module.find_use_path( - sema.db, - it, - config.insert_use.prefix_kind, - cfg, - ) { - insert_use( - &scope, - mod_path_to_ast(&it, edition), - &config.insert_use, - ); - } - } + if !scope_has("Serialize") + && let Some(PathResolution::Def(it)) = serialize_resolved + && let Some(it) = current_module.find_use_path( + sema.db, + it, + config.insert_use.prefix_kind, + cfg, + ) + { + insert_use(&scope, mod_path_to_ast(&it, edition), &config.insert_use); } - if !scope_has("Deserialize") { - if let Some(PathResolution::Def(it)) = deserialize_resolved { - if let Some(it) = current_module.find_use_path( - sema.db, - it, - config.insert_use.prefix_kind, - cfg, - ) { - insert_use( - &scope, - mod_path_to_ast(&it, edition), - &config.insert_use, - ); - } - } + if !scope_has("Deserialize") + && let Some(PathResolution::Def(it)) = deserialize_resolved + && let Some(it) = current_module.find_use_path( + sema.db, + it, + config.insert_use.prefix_kind, + cfg, + ) + { + insert_use(&scope, mod_path_to_ast(&it, edition), &config.insert_use); } let mut sc = scb.finish(); sc.insert_source_edit(vfs_file_id, edit.finish()); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 7da799e0d490b..893bfca6a1298 100644 --- 
a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -227,12 +227,11 @@ fn get_default_constructor( // Look for a ::new() associated function let has_new_func = ty .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| { - if let AssocItem::Function(func) = assoc_item { - if func.name(ctx.sema.db) == sym::new - && func.assoc_fn_params(ctx.sema.db).is_empty() - { - return Some(()); - } + if let AssocItem::Function(func) = assoc_item + && func.name(ctx.sema.db) == sym::new + && func.assoc_fn_params(ctx.sema.db).is_empty() + { + return Some(()); } None diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 8831efa311720..6e30bf92dbaa1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -12,14 +12,14 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option let root = ctx.sema.db.parse_or_expand(d.span.file_id); let node = d.span.value.to_node(&root); let mut span = d.span; - if let Some(parent) = node.parent() { - if ast::BinExpr::can_cast(parent.kind()) { - // In case of an assignment, the diagnostic is provided on the variable name. - // We want to expand it to include the whole assignment, but only when this - // is an ordinary assignment, not a destructuring assignment. So, the direct - // parent is an assignment expression. - span = d.span.with_value(SyntaxNodePtr::new(&parent)); - } + if let Some(parent) = node.parent() + && ast::BinExpr::can_cast(parent.kind()) + { + // In case of an assignment, the diagnostic is provided on the variable name. + // We want to expand it to include the whole assignment, but only when this + // is an ordinary assignment, not a destructuring assignment. So, the direct + // parent is an assignment expression. + span = d.span.with_value(SyntaxNodePtr::new(&parent)); }; let fixes = (|| { @@ -73,10 +73,10 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Op let ast = source.syntax(); let Some(mut_token) = token(ast, T![mut]) else { continue }; edit_builder.delete(mut_token.text_range()); - if let Some(token) = mut_token.next_token() { - if token.kind() == SyntaxKind::WHITESPACE { - edit_builder.delete(token.text_range()); - } + if let Some(token) = mut_token.next_token() + && token.kind() == SyntaxKind::WHITESPACE + { + edit_builder.delete(token.text_range()); } } let edit = edit_builder.finish(); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index d96c658d7b048..3a6e480f55ed4 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -231,13 +231,13 @@ fn make_fixes( // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's // probably `#[cfg]`d out). 
for item in items.clone() { - if let ast::Item::Module(m) = item { - if let Some(name) = m.name() { - if m.item_list().is_none() && name.to_string() == new_mod_name { - cov_mark::hit!(unlinked_file_skip_fix_when_mod_already_exists); - return None; - } - } + if let ast::Item::Module(m) = item + && let Some(name) = m.name() + && m.item_list().is_none() + && name.to_string() == new_mod_name + { + cov_mark::hit!(unlinked_file_skip_fix_when_mod_already_exists); + return None; } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 72bd66d1c8bb7..a1db92641f5ee 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -568,10 +568,10 @@ fn handle_diag_from_macros( diag.fixes = None; // All Clippy lints report in macros, see https://github.com/rust-lang/rust-clippy/blob/903293b199364/declare_clippy_lint/src/lib.rs#L172. - if let DiagnosticCode::RustcLint(lint) = diag.code { - if !LINTS_TO_REPORT_IN_EXTERNAL_MACROS.contains(lint) { - return false; - } + if let DiagnosticCode::RustcLint(lint) = diag.code + && !LINTS_TO_REPORT_IN_EXTERNAL_MACROS.contains(lint) + { + return false; }; } true @@ -760,35 +760,35 @@ fn cfg_attr_lint_attrs( } while let Some(value) = iter.next() { - if let Some(token) = value.as_token() { - if token.kind() == SyntaxKind::IDENT { - let severity = match token.text() { - "allow" | "expect" => Some(Severity::Allow), - "warn" => Some(Severity::Warning), - "forbid" | "deny" => Some(Severity::Error), - "cfg_attr" => { - if let Some(NodeOrToken::Node(value)) = iter.next() { - cfg_attr_lint_attrs(sema, &value, lint_attrs); - } - None - } - _ => None, - }; - if let Some(severity) = severity { - let lints = iter.next(); - if let Some(NodeOrToken::Node(lints)) = lints { - lint_attrs.push((severity, lints)); + if let Some(token) = value.as_token() + && token.kind() == SyntaxKind::IDENT + { + let severity = match token.text() { + "allow" | "expect" => Some(Severity::Allow), + "warn" => Some(Severity::Warning), + "forbid" | "deny" => Some(Severity::Error), + "cfg_attr" => { + if let Some(NodeOrToken::Node(value)) = iter.next() { + cfg_attr_lint_attrs(sema, &value, lint_attrs); } + None + } + _ => None, + }; + if let Some(severity) = severity { + let lints = iter.next(); + if let Some(NodeOrToken::Node(lints)) = lints { + lint_attrs.push((severity, lints)); } } } } - if prev_len != lint_attrs.len() { - if let Some(false) | None = sema.check_cfg_attr(value) { - // Discard the attributes when the condition is false. - lint_attrs.truncate(prev_len); - } + if prev_len != lint_attrs.len() + && let Some(false) | None = sema.check_cfg_attr(value) + { + // Discard the attributes when the condition is false. 
+            lint_attrs.truncate(prev_len);
        }
    }

diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index e4b20f3f1aad6..138af22089eb4 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -283,17 +283,16 @@ impl<'db> MatchFinder<'db> {
                        node: node.clone(),
                    });
                }
-            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
-                if let Some(expanded) = self.sema.expand_macro_call(&macro_call) {
-                    if let Some(tt) = macro_call.token_tree() {
-                        self.output_debug_for_nodes_at_range(
-                            &expanded.value,
-                            range,
-                            &Some(self.sema.original_range(tt.syntax())),
-                            out,
-                        );
-                    }
-                }
+            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone())
+                && let Some(expanded) = self.sema.expand_macro_call(&macro_call)
+                && let Some(tt) = macro_call.token_tree()
+            {
+                self.output_debug_for_nodes_at_range(
+                    &expanded.value,
+                    range,
+                    &Some(self.sema.original_range(tt.syntax())),
+                    out,
+                );
            }
            self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
        }
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
index b350315ba5489..f21132c297ee8 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -156,12 +156,11 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
    /// processing a macro expansion and we want to fail the match if we're working with a node that
    /// didn't originate from the token tree of the macro call.
    fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
-        if let Some(restrict_range) = &self.restrict_range {
-            if restrict_range.file_id != range.file_id
-                || !restrict_range.range.contains_range(range.range)
-            {
-                fail_match!("Node originated from a macro");
-            }
+        if let Some(restrict_range) = &self.restrict_range
+            && (restrict_range.file_id != range.file_id
+                || !restrict_range.range.contains_range(range.range))
+        {
+            fail_match!("Node originated from a macro");
        }
        Ok(())
    }
@@ -404,30 +403,27 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
        // Build a map keyed by field name.
        let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
        for child in code.children() {
-            if let Some(record) = ast::RecordExprField::cast(child.clone()) {
-                if let Some(name) = record.field_name() {
-                    fields_by_name.insert(name.text().into(), child.clone());
-                }
+            if let Some(record) = ast::RecordExprField::cast(child.clone())
+                && let Some(name) = record.field_name()
+            {
+                fields_by_name.insert(name.text().into(), child.clone());
            }
        }
        for p in pattern.children_with_tokens() {
-            if let SyntaxElement::Node(p) = p {
-                if let Some(name_element) = p.first_child_or_token() {
-                    if self.get_placeholder(&name_element).is_some() {
-                        // If the pattern is using placeholders for field names then order
-                        // independence doesn't make sense. Fall back to regular ordered
-                        // matching.
- return self.attempt_match_node_children(phase, pattern, code); - } - if let Some(ident) = only_ident(name_element) { - let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { - match_error!( - "Placeholder has record field '{}', but code doesn't", - ident - ) - })?; - self.attempt_match_node(phase, &p, &code_record)?; - } + if let SyntaxElement::Node(p) = p + && let Some(name_element) = p.first_child_or_token() + { + if self.get_placeholder(&name_element).is_some() { + // If the pattern is using placeholders for field names then order + // independence doesn't make sense. Fall back to regular ordered + // matching. + return self.attempt_match_node_children(phase, pattern, code); + } + if let Some(ident) = only_ident(name_element) { + let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { + match_error!("Placeholder has record field '{}', but code doesn't", ident) + })?; + self.attempt_match_node(phase, &p, &code_record)?; } } } @@ -476,14 +472,13 @@ impl<'db, 'sema> Matcher<'db, 'sema> { } } SyntaxElement::Node(n) => { - if let Some(first_token) = n.first_token() { - if Some(first_token.text()) == next_pattern_token.as_deref() { - if let Some(SyntaxElement::Node(p)) = pattern.next() { - // We have a subtree that starts with the next token in our pattern. - self.attempt_match_token_tree(phase, &p, n)?; - break; - } - } + if let Some(first_token) = n.first_token() + && Some(first_token.text()) == next_pattern_token.as_deref() + && let Some(SyntaxElement::Node(p)) = pattern.next() + { + // We have a subtree that starts with the next token in our pattern. + self.attempt_match_token_tree(phase, &p, n)?; + break; } } }; @@ -562,23 +557,22 @@ impl<'db, 'sema> Matcher<'db, 'sema> { let deref_count = self.check_expr_type(pattern_type, expr)?; let pattern_receiver = pattern_args.next(); self.attempt_match_opt(phase, pattern_receiver.clone(), code.receiver())?; - if let Phase::Second(match_out) = phase { - if let Some(placeholder_value) = pattern_receiver + if let Phase::Second(match_out) = phase + && let Some(placeholder_value) = pattern_receiver .and_then(|n| self.get_placeholder_for_node(n.syntax())) .and_then(|placeholder| { match_out.placeholder_values.get_mut(&placeholder.ident) }) - { - placeholder_value.autoderef_count = deref_count; - placeholder_value.autoref_kind = self - .sema - .resolve_method_call_as_callable(code) - .and_then(|callable| { - let (self_param, _) = callable.receiver_param(self.sema.db)?; - Some(self.sema.source(self_param)?.value.kind()) - }) - .unwrap_or(ast::SelfParamKind::Owned); - } + { + placeholder_value.autoderef_count = deref_count; + placeholder_value.autoref_kind = self + .sema + .resolve_method_call_as_callable(code) + .and_then(|callable| { + let (self_param, _) = callable.receiver_param(self.sema.db)?; + Some(self.sema.source(self_param)?.value.kind()) + }) + .unwrap_or(ast::SelfParamKind::Owned); } } } else { @@ -698,12 +692,11 @@ impl Phase<'_> { } fn record_ignored_comments(&mut self, token: &SyntaxToken) { - if token.kind() == SyntaxKind::COMMENT { - if let Phase::Second(match_out) = self { - if let Some(comment) = ast::Comment::cast(token.clone()) { - match_out.ignored_comments.push(comment); - } - } + if token.kind() == SyntaxKind::COMMENT + && let Phase::Second(match_out) = self + && let Some(comment) = ast::Comment::cast(token.clone()) + { + match_out.ignored_comments.push(comment); } } } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs index 
752edd6535a63..16287a439c358 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs @@ -112,12 +112,12 @@ impl<'db> ReplacementRenderer<'_, 'db> { self.out.push_str(&mod_path.display(self.db, self.edition).to_string()); // Emit everything except for the segment's name-ref, since we already effectively // emitted that as part of `mod_path`. - if let Some(path) = ast::Path::cast(node.clone()) { - if let Some(segment) = path.segment() { - for node_or_token in segment.syntax().children_with_tokens() { - if node_or_token.kind() != SyntaxKind::NAME_REF { - self.render_node_or_token(&node_or_token); - } + if let Some(path) = ast::Path::cast(node.clone()) + && let Some(segment) = path.segment() + { + for node_or_token in segment.syntax().children_with_tokens() { + if node_or_token.kind() != SyntaxKind::NAME_REF { + self.render_node_or_token(&node_or_token); } } } @@ -242,15 +242,15 @@ fn token_is_method_call_receiver(token: &SyntaxToken) -> bool { } fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option { - if ast::Expr::can_cast(kind) { - if let Ok(expr) = fragments::expr(code) { - return Some(expr); - } + if ast::Expr::can_cast(kind) + && let Ok(expr) = fragments::expr(code) + { + return Some(expr); } - if ast::Item::can_cast(kind) { - if let Ok(item) = fragments::item(code) { - return Some(item); - } + if ast::Item::can_cast(kind) + && let Ok(item) = fragments::item(code) + { + return Some(item); } None } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs index 8f28a1cd3a623..a4e2cfbaee27d 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs @@ -83,21 +83,17 @@ impl<'db> Resolver<'_, 'db> { let ufcs_function_calls = resolved_paths .iter() .filter_map(|(path_node, resolved)| { - if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) { - if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) { - if let hir::PathResolution::Def(hir::ModuleDef::Function(function)) = - resolved.resolution - { - if function.as_assoc_item(self.resolution_scope.scope.db).is_some() { - let qualifier_type = - self.resolution_scope.qualifier_type(path_node); - return Some(( - grandparent, - UfcsCallInfo { call_expr, function, qualifier_type }, - )); - } - } - } + if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) + && let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) + && let hir::PathResolution::Def(hir::ModuleDef::Function(function)) = + resolved.resolution + && function.as_assoc_item(self.resolution_scope.scope.db).is_some() + { + let qualifier_type = self.resolution_scope.qualifier_type(path_node); + return Some(( + grandparent, + UfcsCallInfo { call_expr, function, qualifier_type }, + )); } None }) @@ -153,12 +149,11 @@ impl<'db> Resolver<'_, 'db> { /// Returns whether `path` contains a placeholder, but ignores any placeholders within type /// arguments. 
fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
-        if let Some(segment) = path.segment() {
-            if let Some(name_ref) = segment.name_ref() {
-                if self.placeholders_by_stand_in.contains_key(name_ref.text().as_str()) {
-                    return true;
-                }
-            }
+        if let Some(segment) = path.segment()
+            && let Some(name_ref) = segment.name_ref()
+            && self.placeholders_by_stand_in.contains_key(name_ref.text().as_str())
+        {
+            return true;
        }
        if let Some(qualifier) = path.qualifier() {
            return self.path_contains_placeholder(&qualifier);
@@ -252,14 +247,12 @@ impl<'db> ResolutionScope<'db> {
    fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type<'db>> {
        use syntax::ast::AstNode;
-        if let Some(path) = ast::Path::cast(path.clone()) {
-            if let Some(qualifier) = path.qualifier() {
-                if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) =
-                    self.resolve_path(&qualifier)
-                {
-                    return Some(adt.ty(self.scope.db));
-                }
-            }
+        if let Some(path) = ast::Path::cast(path.clone())
+            && let Some(qualifier) = path.qualifier()
+            && let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) =
+                self.resolve_path(&qualifier)
+        {
+            return Some(adt.ty(self.scope.db));
        }
        None
    }
@@ -299,11 +292,11 @@ fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
/// Returns whether `path` or any of its qualifiers contains type arguments.
fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
    if let Some(path) = path {
-        if let Some(segment) = path.segment() {
-            if segment.generic_arg_list().is_some() {
-                cov_mark::hit!(type_arguments_within_path);
-                return true;
-            }
+        if let Some(segment) = path.segment()
+            && segment.generic_arg_list().is_some()
+        {
+            cov_mark::hit!(type_arguments_within_path);
+            return true;
        }
        return path_contains_type_arguments(path.qualifier());
    }
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
index 99a98fb2a7130..72f857ceda903 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -187,16 +187,15 @@ impl<'db> MatchFinder<'db> {
        self.try_add_match(rule, code, restrict_range, matches_out);
        // If we've got a macro call, we already tried matching it pre-expansion, which is the only
        // way to match the whole macro, now try expanding it and matching the expansion.
-        if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
-            if let Some(expanded) = self.sema.expand_macro_call(&macro_call) {
-                if let Some(tt) = macro_call.token_tree() {
-                    // When matching within a macro expansion, we only want to allow matches of
-                    // nodes that originated entirely from within the token tree of the macro call.
-                    // i.e. we don't want to match something that came from the macro itself.
-                    if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
-                        self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out);
-                    }
-                }
+        if let Some(macro_call) = ast::MacroCall::cast(code.clone())
+            && let Some(expanded) = self.sema.expand_macro_call(&macro_call)
+            && let Some(tt) = macro_call.token_tree()
+        {
+            // When matching within a macro expansion, we only want to allow matches of
+            // nodes that originated entirely from within the token tree of the macro call.
+            // i.e. we don't want to match something that came from the macro itself.
+ if let Some(range) = self.sema.original_range_opt(tt.syntax()) { + self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out); } } for child in code.children() { @@ -241,10 +240,10 @@ impl<'db> MatchFinder<'db> { /// Returns whether we support matching within `node` and all of its ancestors. fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool { - if let Some(parent) = node.parent() { - if !is_search_permitted_ancestors(&parent) { - return false; - } + if let Some(parent) = node.parent() + && !is_search_permitted_ancestors(&parent) + { + return false; } is_search_permitted(node) } diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs index 05196ac98c03e..dec1889926dad 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs @@ -159,10 +159,10 @@ pub(crate) fn annotations( node.value.syntax().text_range(), Some(name), ); - if res.call_site.0.file_id == source_file_id { - if let Some(name_range) = res.call_site.1 { - return Some((res.call_site.0.range, Some(name_range))); - } + if res.call_site.0.file_id == source_file_id + && let Some(name_range) = res.call_site.1 + { + return Some((res.call_site.0.range, Some(name_range))); } }; // otherwise try upmapping the entire node out of attributes diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index f31886b969766..ad84eacfb3e88 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -96,14 +96,14 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< let (name, expanded, kind) = loop { let node = anc.next()?; - if let Some(item) = ast::Item::cast(node.clone()) { - if let Some(def) = sema.resolve_attr_macro_call(&item) { - break ( - def.name(db).display(db, file_id.edition(db)).to_string(), - expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?, - SyntaxKind::MACRO_ITEMS, - ); - } + if let Some(item) = ast::Item::cast(node.clone()) + && let Some(def) = sema.resolve_attr_macro_call(&item) + { + break ( + def.name(db).display(db, file_id.edition(db)).to_string(), + expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?, + SyntaxKind::MACRO_ITEMS, + ); } if let Some(mac) = ast::MacroCall::cast(node) { let mut name = mac.path()?.segment()?.name_ref()?.to_string(); diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs index a374f9752fcfa..2926384c40786 100644 --- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs +++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs @@ -81,10 +81,10 @@ fn try_extend_selection( if token.text_range() != range { return Some(token.text_range()); } - if let Some(comment) = ast::Comment::cast(token.clone()) { - if let Some(range) = extend_comments(comment) { - return Some(range); - } + if let Some(comment) = ast::Comment::cast(token.clone()) + && let Some(range) = extend_comments(comment) + { + return Some(range); } token.parent()? 
} @@ -92,12 +92,11 @@ fn try_extend_selection( }; // if we are in single token_tree, we maybe live in macro or attr - if node.kind() == TOKEN_TREE { - if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { - if let Some(range) = extend_tokens_from_range(sema, macro_call, range) { - return Some(range); - } - } + if node.kind() == TOKEN_TREE + && let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) + && let Some(range) = extend_tokens_from_range(sema, macro_call, range) + { + return Some(range); } if node.text_range() != range { @@ -106,10 +105,10 @@ fn try_extend_selection( let node = shallowest_node(&node); - if node.parent().is_some_and(|n| list_kinds.contains(&n.kind())) { - if let Some(range) = extend_list_item(&node) { - return Some(range); - } + if node.parent().is_some_and(|n| list_kinds.contains(&n.kind())) + && let Some(range) = extend_list_item(&node) + { + return Some(range); } node.parent().map(|it| it.text_range()) @@ -221,19 +220,20 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start(); let ws_suffix = &ws_text[suffix]; let ws_prefix = &ws_text[prefix]; - if ws_text.contains('\n') && !ws_suffix.contains('\n') { - if let Some(node) = ws.next_sibling_or_token() { - let start = match ws_prefix.rfind('\n') { - Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32), - None => node.text_range().start(), - }; - let end = if root.text().char_at(node.text_range().end()) == Some('\n') { - node.text_range().end() + TextSize::of('\n') - } else { - node.text_range().end() - }; - return TextRange::new(start, end); - } + if ws_text.contains('\n') + && !ws_suffix.contains('\n') + && let Some(node) = ws.next_sibling_or_token() + { + let start = match ws_prefix.rfind('\n') { + Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32), + None => node.text_range().start(), + }; + let end = if root.text().char_at(node.text_range().end()) == Some('\n') { + node.text_range().end() + TextSize::of('\n') + } else { + node.text_range().end() + }; + return TextRange::new(start, end); } ws.text_range() } diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs index 1901bcc797e77..ac64413effebf 100755 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -61,30 +61,29 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { }; if is_multiline { // for the func with multiline param list - if matches!(element.kind(), FN) { - if let NodeOrToken::Node(node) = &element { - if let Some(fn_node) = ast::Fn::cast(node.clone()) { - if !fn_node - .param_list() - .map(|param_list| param_list.syntax().text().contains_char('\n')) - .unwrap_or(false) - { - continue; - } + if matches!(element.kind(), FN) + && let NodeOrToken::Node(node) = &element + && let Some(fn_node) = ast::Fn::cast(node.clone()) + { + if !fn_node + .param_list() + .map(|param_list| param_list.syntax().text().contains_char('\n')) + .unwrap_or(false) + { + continue; + } - if fn_node.body().is_some() { - // Get the actual start of the function (excluding doc comments) - let fn_start = fn_node - .fn_token() - .map(|token| token.text_range().start()) - .unwrap_or(node.text_range().start()); - res.push(Fold { - range: TextRange::new(fn_start, node.text_range().end()), - kind: FoldKind::Function, - }); - continue; - } - } + if 
fn_node.body().is_some() { + // Get the actual start of the function (excluding doc comments) + let fn_start = fn_node + .fn_token() + .map(|token| token.text_range().start()) + .unwrap_or(node.text_range().start()); + res.push(Fold { + range: TextRange::new(fn_start, node.text_range().end()), + kind: FoldKind::Function, + }); + continue; } } res.push(Fold { range: element.text_range(), kind }); @@ -120,14 +119,13 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { match_ast! { match node { ast::Module(module) => { - if module.item_list().is_none() { - if let Some(range) = contiguous_range_for_item_group( + if module.item_list().is_none() + && let Some(range) = contiguous_range_for_item_group( module, &mut visited_nodes, ) { res.push(Fold { range, kind: FoldKind::Modules }) } - } }, ast::Use(use_) => { if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_nodes) { @@ -212,11 +210,11 @@ where for element in first.syntax().siblings_with_tokens(Direction::Next) { let node = match element { NodeOrToken::Token(token) => { - if let Some(ws) = ast::Whitespace::cast(token) { - if !ws.spans_multiple_lines() { - // Ignore whitespace without blank lines - continue; - } + if let Some(ws) = ast::Whitespace::cast(token) + && !ws.spans_multiple_lines() + { + // Ignore whitespace without blank lines + continue; } // There is a blank line or another token, which means that the // group ends here @@ -270,21 +268,21 @@ fn contiguous_range_for_comment( for element in first.syntax().siblings_with_tokens(Direction::Next) { match element { NodeOrToken::Token(token) => { - if let Some(ws) = ast::Whitespace::cast(token.clone()) { - if !ws.spans_multiple_lines() { - // Ignore whitespace without blank lines - continue; - } + if let Some(ws) = ast::Whitespace::cast(token.clone()) + && !ws.spans_multiple_lines() + { + // Ignore whitespace without blank lines + continue; } - if let Some(c) = ast::Comment::cast(token) { - if c.kind() == group_kind { - let text = c.text().trim_start(); - // regions are not real comments - if !(text.starts_with(REGION_START) || text.starts_with(REGION_END)) { - visited.insert(c.clone()); - last = c; - continue; - } + if let Some(c) = ast::Comment::cast(token) + && c.kind() == group_kind + { + let text = c.text().trim_start(); + // regions are not real comments + if !(text.starts_with(REGION_START) || text.starts_with(REGION_END)) { + visited.insert(c.clone()); + last = c; + continue; } } // The comment group ends because either: diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 29fc68bb50f19..84e41277390ff 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -94,18 +94,17 @@ pub(crate) fn goto_definition( let parent = token.value.parent()?; let token_file_id = token.file_id; - if let Some(token) = ast::String::cast(token.value.clone()) { - if let Some(x) = + if let Some(token) = ast::String::cast(token.value.clone()) + && let Some(x) = try_lookup_include_path(sema, InFile::new(token_file_id, token), file_id) - { - return Some(vec![x]); - } + { + return Some(vec![x]); } - if ast::TokenTree::can_cast(parent.kind()) { - if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) { - return Some(vec![x]); - } + if ast::TokenTree::can_cast(parent.kind()) + && let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) + { + return Some(vec![x]); } Some( @@ -245,12 +244,11 @@ fn 
try_lookup_macro_def_in_macro_use(
    let krate = extern_crate.resolved_crate(sema.db)?;
    for mod_def in krate.root_module().declarations(sema.db) {
-        if let ModuleDef::Macro(mac) = mod_def {
-            if mac.name(sema.db).as_str() == token.text() {
-                if let Some(nav) = mac.try_to_nav(sema.db) {
-                    return Some(nav.call_site);
-                }
-            }
+        if let ModuleDef::Macro(mac) = mod_def
+            && mac.name(sema.db).as_str() == token.text()
+            && let Some(nav) = mac.try_to_nav(sema.db)
+        {
+            return Some(nav.call_site);
        }
    }

diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 356bd69aa44ea..9960e79a5380f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -722,20 +722,19 @@ impl<'a> WalkExpandedExprCtx<'a> {
            self.depth += 1;
        }
-        if let ast::Expr::MacroExpr(expr) = expr {
-            if let Some(expanded) =
+        if let ast::Expr::MacroExpr(expr) = expr
+            && let Some(expanded) =
                expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call))
-            {
-                match_ast! {
-                    match (expanded.value) {
-                        ast::MacroStmts(it) => {
-                            self.handle_expanded(it, cb);
-                        },
-                        ast::Expr(it) => {
-                            self.walk(&it, cb);
-                        },
-                        _ => {}
-                    }
+        {
+            match_ast! {
+                match (expanded.value) {
+                    ast::MacroStmts(it) => {
+                        self.handle_expanded(it, cb);
+                    },
+                    ast::Expr(it) => {
+                        self.walk(&it, cb);
+                    },
+                    _ => {}
                }
            }
        }
@@ -755,10 +754,10 @@ impl<'a> WalkExpandedExprCtx<'a> {
        }
        for stmt in expanded.statements() {
-            if let ast::Stmt::ExprStmt(stmt) = stmt {
-                if let Some(expr) = stmt.expr() {
-                    self.walk(&expr, cb);
-                }
+            if let ast::Stmt::ExprStmt(stmt) = stmt
+                && let Some(expr) = stmt.expr()
+            {
+                self.walk(&expr, cb);
            }
        }
    }
@@ -806,12 +805,12 @@ pub(crate) fn highlight_unsafe_points(
        push_to_highlights(unsafe_token_file_id, Some(unsafe_token.text_range()));
        // highlight unsafe operations
-        if let Some(block) = block_expr {
-            if let Some(body) = sema.body_for(InFile::new(unsafe_token_file_id, block.syntax())) {
-                let unsafe_ops = sema.get_unsafe_ops(body);
-                for unsafe_op in unsafe_ops {
-                    push_to_highlights(unsafe_op.file_id, Some(unsafe_op.value.text_range()));
-                }
+        if let Some(block) = block_expr
+            && let Some(body) = sema.body_for(InFile::new(unsafe_token_file_id, block.syntax()))
+        {
+            let unsafe_ops = sema.get_unsafe_ops(body);
+            for unsafe_op in unsafe_ops {
+                push_to_highlights(unsafe_op.file_id, Some(unsafe_op.value.text_range()));
            }
        }
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index e4d6279759ed7..44c98a43f6944 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -244,17 +244,15 @@ fn hover_offset(
    let node = token.parent()?;
    // special case macro calls, we wanna render the invoked arm index
-    if let Some(name) = ast::NameRef::cast(node.clone()) {
-        if let Some(path_seg) =
+    if let Some(name) = ast::NameRef::cast(node.clone())
+        && let Some(path_seg) =
            name.syntax().parent().and_then(ast::PathSegment::cast)
-        {
-            if let Some(macro_call) = path_seg
+        && let Some(macro_call) = path_seg
            .parent_path()
            .syntax()
            .parent()
            .and_then(ast::MacroCall::cast)
-            {
-                if let Some(macro_) = sema.resolve_macro_call(&macro_call) {
+        && let Some(macro_) = sema.resolve_macro_call(&macro_call) {
        break 'a vec![(
            (Definition::Macro(macro_), None),
            sema.resolve_macro_call_arm(&macro_call),
            node,
        )];
    }
-            }
-        }
-    }
    match IdentClass::classify_node(sema,
&node)? { // It's better for us to fall back to the keyword hover here, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 670210d4998dd..51b5900e8155a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -95,23 +95,25 @@ pub(super) fn try_expr( if let Some((hir::Adt::Enum(inner), hir::Adt::Enum(body))) = adts { let famous_defs = FamousDefs(sema, sema.scope(try_expr.syntax())?.krate()); // special case for two options, there is no value in showing them - if let Some(option_enum) = famous_defs.core_option_Option() { - if inner == option_enum && body == option_enum { - cov_mark::hit!(hover_try_expr_opt_opt); - return None; - } + if let Some(option_enum) = famous_defs.core_option_Option() + && inner == option_enum + && body == option_enum + { + cov_mark::hit!(hover_try_expr_opt_opt); + return None; } // special case two results to show the error variants only - if let Some(result_enum) = famous_defs.core_result_Result() { - if inner == result_enum && body == result_enum { - let error_type_args = - inner_ty.type_arguments().nth(1).zip(body_ty.type_arguments().nth(1)); - if let Some((inner, body)) = error_type_args { - inner_ty = inner; - body_ty = body; - "Try Error".clone_into(&mut s); - } + if let Some(result_enum) = famous_defs.core_result_Result() + && inner == result_enum + && body == result_enum + { + let error_type_args = + inner_ty.type_arguments().nth(1).zip(body_ty.type_arguments().nth(1)); + if let Some((inner, body)) = error_type_args { + inner_ty = inner; + body_ty = body; + "Try Error".clone_into(&mut s); } } } @@ -1132,10 +1134,10 @@ fn markup( ) -> (Markup, Option) { let mut buf = String::new(); - if let Some(mod_path) = mod_path { - if !mod_path.is_empty() { - format_to!(buf, "```rust\n{}\n```\n\n", mod_path); - } + if let Some(mod_path) = mod_path + && !mod_path.is_empty() + { + format_to!(buf, "```rust\n{}\n```\n\n", mod_path); } format_to!(buf, "```rust\n{}\n```", rust); @@ -1217,55 +1219,55 @@ fn render_memory_layout( format_to!(label, ", "); } - if let Some(render) = config.offset { - if let Some(offset) = offset(&layout) { - format_to!(label, "offset = "); - match render { - MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{offset}"), - MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{offset:#X}"), - MemoryLayoutHoverRenderKind::Both if offset >= 10 => { - format_to!(label, "{offset} ({offset:#X})") - } - MemoryLayoutHoverRenderKind::Both => { - format_to!(label, "{offset}") - } + if let Some(render) = config.offset + && let Some(offset) = offset(&layout) + { + format_to!(label, "offset = "); + match render { + MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{offset}"), + MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{offset:#X}"), + MemoryLayoutHoverRenderKind::Both if offset >= 10 => { + format_to!(label, "{offset} ({offset:#X})") + } + MemoryLayoutHoverRenderKind::Both => { + format_to!(label, "{offset}") } - format_to!(label, ", "); } + format_to!(label, ", "); } - if let Some(render) = config.padding { - if let Some((padding_name, padding)) = padding(&layout) { - format_to!(label, "{padding_name} = "); - match render { - MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{padding}"), - MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{padding:#X}"), - MemoryLayoutHoverRenderKind::Both if padding >= 10 => { - format_to!(label, "{padding} 
({padding:#X})") - } - MemoryLayoutHoverRenderKind::Both => { - format_to!(label, "{padding}") - } + if let Some(render) = config.padding + && let Some((padding_name, padding)) = padding(&layout) + { + format_to!(label, "{padding_name} = "); + match render { + MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{padding}"), + MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{padding:#X}"), + MemoryLayoutHoverRenderKind::Both if padding >= 10 => { + format_to!(label, "{padding} ({padding:#X})") + } + MemoryLayoutHoverRenderKind::Both => { + format_to!(label, "{padding}") } - format_to!(label, ", "); } + format_to!(label, ", "); } - if config.niches { - if let Some(niches) = layout.niches() { - if niches > 1024 { - if niches.is_power_of_two() { - format_to!(label, "niches = 2{}, ", pwr2_to_exponent(niches)); - } else if is_pwr2plus1(niches) { - format_to!(label, "niches = 2{} + 1, ", pwr2_to_exponent(niches - 1)); - } else if is_pwr2minus1(niches) { - format_to!(label, "niches = 2{} - 1, ", pwr2_to_exponent(niches + 1)); - } else { - format_to!(label, "niches = a lot, "); - } + if config.niches + && let Some(niches) = layout.niches() + { + if niches > 1024 { + if niches.is_power_of_two() { + format_to!(label, "niches = 2{}, ", pwr2_to_exponent(niches)); + } else if is_pwr2plus1(niches) { + format_to!(label, "niches = 2{} + 1, ", pwr2_to_exponent(niches - 1)); + } else if is_pwr2minus1(niches) { + format_to!(label, "niches = 2{} - 1, ", pwr2_to_exponent(niches + 1)); } else { - format_to!(label, "niches = {niches}, "); + format_to!(label, "niches = a lot, "); } + } else { + format_to!(label, "niches = {niches}, "); } } label.pop(); // ' ' diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 671fddb436309..7a8514c47af95 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -576,13 +576,13 @@ impl InlayHintLabel { } pub fn append_part(&mut self, part: InlayHintLabelPart) { - if part.linked_location.is_none() && part.tooltip.is_none() { - if let Some(InlayHintLabelPart { text, linked_location: None, tooltip: None }) = + if part.linked_location.is_none() + && part.tooltip.is_none() + && let Some(InlayHintLabelPart { text, linked_location: None, tooltip: None }) = self.parts.last_mut() - { - text.push_str(&part.text); - return; - } + { + text.push_str(&part.text); + return; } self.parts.push(part); } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 49b43fc37f24a..4d020bac3aad4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -39,10 +39,10 @@ pub(super) fn hints( if let ast::Expr::ParenExpr(_) = expr { return None; } - if let ast::Expr::BlockExpr(b) = expr { - if !b.is_standalone() { - return None; - } + if let ast::Expr::BlockExpr(b) = expr + && !b.is_standalone() + { + return None; } let descended = sema.descend_node_into_attributes(expr.clone()).pop(); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 729349365e6c8..922e9598aa017 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -41,13 +41,11 @@ pub(super) fn hints( 
Some(it.colon_token()) }, ast::LetStmt(it) => { - if config.hide_closure_initialization_hints { - if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { - if closure_has_block_body(&closure) { + if config.hide_closure_initialization_hints + && let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() + && closure_has_block_body(&closure) { return None; } - } - } if it.ty().is_some() { return None; } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index ff157fa171b50..a8bb652fda226 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -51,12 +51,11 @@ pub(super) fn hints( if ty.is_unknown() { return None; } - if matches!(expr, ast::Expr::PathExpr(_)) { - if let Some(hir::Adt::Struct(st)) = ty.as_adt() { - if st.fields(sema.db).is_empty() { - return None; - } - } + if matches!(expr, ast::Expr::PathExpr(_)) + && let Some(hir::Adt::Struct(st)) = ty.as_adt() + && st.fields(sema.db).is_empty() + { + return None; } let label = label_of_ty(famous_defs, config, &ty, display_target)?; acc.push(InlayHint { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index 05253b6794891..e80c9dc9d4732 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -120,11 +120,11 @@ pub(super) fn hints( }; if let Some(mut next) = closing_token.next_token() { - if next.kind() == T![;] { - if let Some(tok) = next.next_token() { - closing_token = next; - next = tok; - } + if next.kind() == T![;] + && let Some(tok) = next.next_token() + { + closing_token = next; + next = tok; } if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) { // Only display the hint if the `}` is the last token on the line diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs index 9e600b5455be2..fef1cb83c1195 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs @@ -55,11 +55,9 @@ pub(super) fn hints( // Insert braces if necessary let insert_braces = |builder: &mut TextEditBuilder| { - if !has_block_body { - if let Some(range) = closure.body().map(|b| b.syntax().text_range()) { - builder.insert(range.start(), "{ ".to_owned()); - builder.insert(range.end(), " }".to_owned()); - } + if !has_block_body && let Some(range) = closure.body().map(|b| b.syntax().text_range()) { + builder.insert(range.start(), "{ ".to_owned()); + builder.insert(range.end(), " }".to_owned()); } }; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs index 88152bf3e3883..491018a4dda84 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs @@ -81,10 +81,10 @@ fn item_hint( text_edit: Some(config.lazy_text_edit(|| { let mut builder = TextEdit::builder(); builder.insert(token.text_range().start(), "unsafe ".to_owned()); - if extern_block.unsafe_token().is_none() { - if let Some(abi) = extern_block.abi() { - builder.insert(abi.syntax().text_range().start(), "unsafe ".to_owned()); - } + if 
extern_block.unsafe_token().is_none() + && let Some(abi) = extern_block.abi() + { + builder.insert(abi.syntax().text_range().start(), "unsafe ".to_owned()); } builder.finish() })), diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs index 6e1b3bdbdf039..1fddb6fbe01d1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs @@ -33,10 +33,10 @@ pub(crate) fn hints( let mut args = generic_arg_list.generic_args().peekable(); let start_with_lifetime = matches!(args.peek()?, ast::GenericArg::LifetimeArg(_)); let params = generic_def.params(sema.db).into_iter().filter(|p| { - if let hir::GenericParam::TypeParam(it) = p { - if it.is_implicit(sema.db) { - return false; - } + if let hir::GenericParam::TypeParam(it) = p + && it.is_implicit(sema.db) + { + return false; } if !start_with_lifetime { return !matches!(p, hir::GenericParam::LifetimeParam(_)); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs index 7212efd954e88..bddce904dfdea 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs @@ -22,30 +22,31 @@ pub(super) fn hints( return None; } - if let Either::Right(it) = &statik_or_const { - if ast::AssocItemList::can_cast( + if let Either::Right(it) = &statik_or_const + && ast::AssocItemList::can_cast( it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()), - ) { - return None; - } + ) + { + return None; } - if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) { - if ty.lifetime().is_none() { - let t = ty.amp_token()?; - acc.push(InlayHint { - range: t.text_range(), - kind: InlayKind::Lifetime, - label: "'static".into(), - text_edit: Some(config.lazy_text_edit(|| { - TextEdit::insert(t.text_range().start(), "'static ".into()) - })), - position: InlayHintPosition::After, - pad_left: false, - pad_right: true, - resolve_parent: None, - }); - } + if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) + && ty.lifetime().is_none() + { + let t = ty.amp_token()?; + acc.push(InlayHint { + range: t.text_range(), + kind: InlayKind::Lifetime, + label: "'static".into(), + text_edit: Some( + config + .lazy_text_edit(|| TextEdit::insert(t.text_range().start(), "'static ".into())), + ), + position: InlayHintPosition::After, + pad_left: false, + pad_right: true, + resolve_parent: None, + }); } Some(()) diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs index 49fec0a793c3a..a89c53e00b3b0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs @@ -324,35 +324,35 @@ fn hints_( // apply hints // apply output if required - if let (Some(output_lt), Some(r)) = (&output, ret_type) { - if let Some(ty) = r.ty() { - walk_ty(&ty, &mut |ty| match ty { - ast::Type::RefType(ty) if ty.lifetime().is_none() => { - if let Some(amp) = ty.amp_token() { - is_trivial = false; - acc.push(mk_lt_hint(amp, output_lt.to_string())); - } - false + if let (Some(output_lt), Some(r)) = (&output, ret_type) + && let Some(ty) = r.ty() + { + walk_ty(&ty, &mut |ty| match ty { + ast::Type::RefType(ty) if 
ty.lifetime().is_none() => { + if let Some(amp) = ty.amp_token() { + is_trivial = false; + acc.push(mk_lt_hint(amp, output_lt.to_string())); } - ast::Type::FnPtrType(_) => { + false + } + ast::Type::FnPtrType(_) => { + is_trivial = false; + true + } + ast::Type::PathType(t) => { + if t.path() + .and_then(|it| it.segment()) + .and_then(|it| it.parenthesized_arg_list()) + .is_some() + { is_trivial = false; true + } else { + false } - ast::Type::PathType(t) => { - if t.path() - .and_then(|it| it.segment()) - .and_then(|it| it.parenthesized_arg_list()) - .is_some() - { - is_trivial = false; - true - } else { - false - } - } - _ => false, - }) - } + } + _ => false, + }) } if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 5174228466c08..ec0a4c46c7fec 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -135,10 +135,10 @@ fn should_hide_param_name_hint( } if unary_function { - if let Some(function_name) = function_name { - if is_param_name_suffix_of_fn_name(param_name, function_name) { - return true; - } + if let Some(function_name) = function_name + && is_param_name_suffix_of_fn_name(param_name, function_name) + { + return true; } if is_obvious_param(param_name) { return true; diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs index 0188c105faa78..a946559c35455 100644 --- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs +++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs @@ -144,15 +144,15 @@ fn remove_newline( } } - if config.join_else_if { - if let (Some(prev), Some(_next)) = (as_if_expr(&prev), as_if_expr(&next)) { - match prev.else_token() { - Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else), - None => { - cov_mark::hit!(join_two_ifs); - edit.replace(token.text_range(), " else ".to_owned()); - return; - } + if config.join_else_if + && let (Some(prev), Some(_next)) = (as_if_expr(&prev), as_if_expr(&next)) + { + match prev.else_token() { + Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else), + None => { + cov_mark::hit!(join_two_ifs); + edit.replace(token.text_range(), " else ".to_owned()); + return; } } } @@ -213,10 +213,10 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Op let mut buf = expr.syntax().text().to_string(); // Match block needs to have a comma after the block - if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) { - if match_arm.comma_token().is_none() { - buf.push(','); - } + if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) + && match_arm.comma_token().is_none() + { + buf.push(','); } edit.replace(block_range, buf); diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs index 50219cee57db4..96d829d1260bd 100644 --- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs +++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs @@ -29,14 +29,13 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec(source_file.syntax(), position.offset); // If cursor is literally on `mod foo`, go to the grandpa. 
- if let Some(m) = &module { - if !m + if let Some(m) = &module + && !m .item_list() .is_some_and(|it| it.syntax().text_range().contains_inclusive(position.offset)) - { - cov_mark::hit!(test_resolve_parent_module_on_module_decl); - module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); - } + { + cov_mark::hit!(test_resolve_parent_module_on_module_decl); + module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); } match module { diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index 6c1d142c3b058..634edaa5edaf0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -494,10 +494,10 @@ mod tests { ) { let ra_fixture_after = &trim_indent(ra_fixture_after); let (analysis, position) = fixture::position(ra_fixture_before); - if !ra_fixture_after.starts_with("error: ") { - if let Err(err) = analysis.prepare_rename(position).unwrap() { - panic!("Prepare rename to '{new_name}' was failed: {err}") - } + if !ra_fixture_after.starts_with("error: ") + && let Err(err) = analysis.prepare_rename(position).unwrap() + { + panic!("Prepare rename to '{new_name}' was failed: {err}") } let rename_result = analysis .rename(position, new_name) diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 9d1a5bae96fbc..83e5c5ab1dfeb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -514,20 +514,19 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { .flat_map(|it| it.name(db)) .for_each(|name| format_to!(path, "{}::", name.display(db, edition))); // This probably belongs to canonical_path? 
- if let Some(assoc_item) = def.as_assoc_item(db) { - if let Some(ty) = assoc_item.implementing_ty(db) { - if let Some(adt) = ty.as_adt() { - let name = adt.name(db); - let mut ty_args = ty.generic_parameters(db, display_target).peekable(); - format_to!(path, "{}", name.display(db, edition)); - if ty_args.peek().is_some() { - format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); - } - format_to!(path, "::{}", def_name.display(db, edition)); - path.retain(|c| c != ' '); - return Some(path); - } + if let Some(assoc_item) = def.as_assoc_item(db) + && let Some(ty) = assoc_item.implementing_ty(db) + && let Some(adt) = ty.as_adt() + { + let name = adt.name(db); + let mut ty_args = ty.generic_parameters(db, display_target).peekable(); + format_to!(path, "{}", name.display(db, edition)); + if ty_args.peek().is_some() { + format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); } + format_to!(path, "::{}", def_name.display(db, edition)); + path.retain(|c| c != ' '); + return Some(path); } format_to!(path, "{}", def_name.display(db, edition)); Some(path) @@ -697,14 +696,13 @@ impl UpdateTest { continue; }; for item in items { - if let hir::ItemInNs::Macros(makro) = item { - if Definition::Macro(makro) + if let hir::ItemInNs::Macros(makro) = item + && Definition::Macro(makro) .usages(sema) .in_scope(&search_scope) .at_least_one() - { - return true; - } + { + return true; } } } diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index e30a3ebefb98c..382573b680113 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -146,12 +146,11 @@ pub(crate) fn signature_help( // Stop at multi-line expressions, since the signature of the outer call is not very // helpful inside them. 
- if let Some(expr) = ast::Expr::cast(node.clone()) { - if !matches!(expr, ast::Expr::RecordExpr(..)) - && expr.syntax().text().contains_char('\n') - { - break; - } + if let Some(expr) = ast::Expr::cast(node.clone()) + && !matches!(expr, ast::Expr::RecordExpr(..)) + && expr.syntax().text().contains_char('\n') + { + break; } } @@ -366,10 +365,10 @@ fn signature_help_for_generics( res.signature.push('<'); let mut buf = String::new(); for param in params { - if let hir::GenericParam::TypeParam(ty) = param { - if ty.is_implicit(db) { - continue; - } + if let hir::GenericParam::TypeParam(ty) = param + && ty.is_implicit(db) + { + continue; } buf.clear(); diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index efee39c13db94..694ac22e1993b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -133,10 +133,10 @@ fn get_definitions( ) -> Option> { for token in sema.descend_into_macros_exact(token) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); - if let Some(defs) = def { - if !defs.is_empty() { - return Some(defs); - } + if let Some(defs) = def + && !defs.is_empty() + { + return Some(defs); } } None diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 87db0cd7dc53c..8bde8fd970063 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -306,12 +306,12 @@ fn highlight_name_ref( }; let mut h = match name_class { NameRefClass::Definition(def, _) => { - if let Definition::Local(local) = &def { - if let Some(bindings_shadow_count) = bindings_shadow_count { - let name = local.name(sema.db); - let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } + if let Definition::Local(local) = &def + && let Some(bindings_shadow_count) = bindings_shadow_count + { + let name = local.name(sema.db); + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) }; let mut h = highlight_def(sema, krate, def, edition, true); @@ -437,21 +437,21 @@ fn highlight_name( edition: Edition, ) -> Highlight { let name_kind = NameClass::classify(sema, &name); - if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind { - if let Some(bindings_shadow_count) = bindings_shadow_count { - let name = local.name(sema.db); - let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - *shadow_count += 1; - *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } + if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind + && let Some(bindings_shadow_count) = bindings_shadow_count + { + let name = local.name(sema.db); + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *shadow_count += 1; + *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) }; match name_kind { Some(NameClass::Definition(def)) => { let mut h = highlight_def(sema, krate, def, edition, false) | HlMod::Definition; - if let Definition::Trait(trait_) = &def { - if trait_.is_unsafe(sema.db) { - h |= HlMod::Unsafe; - } + if let Definition::Trait(trait_) = &def + && trait_.is_unsafe(sema.db) + { + h |= HlMod::Unsafe; 
} h } @@ -743,10 +743,9 @@ fn highlight_method_call( hir::Access::Owned => { if let Some(receiver_ty) = method_call.receiver().and_then(|it| sema.type_of_expr(&it)) + && !receiver_ty.adjusted().is_copy(sema.db) { - if !receiver_ty.adjusted().is_copy(sema.db) { - h |= HlMod::Consuming - } + h |= HlMod::Consuming } } } diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 98f415a522cb8..ad838a6550eca 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -475,10 +475,10 @@ fn load_crate_graph_into_db( } let changes = vfs.take_changes(); for (_, file) in changes { - if let vfs::Change::Create(v, _) | vfs::Change::Modify(v, _) = file.change { - if let Ok(text) = String::from_utf8(v) { - analysis_change.change_file(file.file_id, Some(text)) - } + if let vfs::Change::Create(v, _) | vfs::Change::Modify(v, _) = file.change + && let Ok(text) = String::from_utf8(v) + { + analysis_change.change_file(file.file_id, Some(text)) } } let source_roots = source_root_config.partition(vfs); diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index 04ac85ad43ddf..b185556b5c7b7 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -185,24 +185,22 @@ fn invocation_fixtures( for it in tokens.iter() { collect_from_op(it, builder, seed); } - if i + 1 != cnt { - if let Some(sep) = separator { - match &**sep { - Separator::Literal(it) => { - builder.push(tt::Leaf::Literal(it.clone())) + if i + 1 != cnt + && let Some(sep) = separator + { + match &**sep { + Separator::Literal(it) => builder.push(tt::Leaf::Literal(it.clone())), + Separator::Ident(it) => builder.push(tt::Leaf::Ident(it.clone())), + Separator::Puncts(puncts) => { + for it in puncts { + builder.push(tt::Leaf::Punct(*it)) } - Separator::Ident(it) => builder.push(tt::Leaf::Ident(it.clone())), - Separator::Puncts(puncts) => { - for it in puncts { - builder.push(tt::Leaf::Punct(*it)) - } - } - Separator::Lifetime(punct, ident) => { - builder.push(tt::Leaf::Punct(*punct)); - builder.push(tt::Leaf::Ident(ident.clone())); - } - }; - } + } + Separator::Lifetime(punct, ident) => { + builder.push(tt::Leaf::Punct(*punct)); + builder.push(tt::Leaf::Ident(ident.clone())); + } + }; } } } diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index a8d5965d480c2..189efcd15c2f7 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -475,12 +475,12 @@ fn match_loop_inner<'t>( }) } OpDelimited::Op(Op::Subtree { tokens, delimiter }) => { - if let Ok((subtree, _)) = src.clone().expect_subtree() { - if subtree.delimiter.kind == delimiter.kind { - item.stack.push(item.dot); - item.dot = tokens.iter_delimited_with(*delimiter); - cur_items.push(item); - } + if let Ok((subtree, _)) = src.clone().expect_subtree() + && subtree.delimiter.kind == delimiter.kind + { + item.stack.push(item.dot); + item.dot = tokens.iter_delimited_with(*delimiter); + cur_items.push(item); } } OpDelimited::Op(Op::Var { kind, name, .. 
}) => { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs index 2b4151e3b752d..41fd72d8d5a2f 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs @@ -77,38 +77,38 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) { return; } - if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) { - if !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) { - // test no_semi_after_block - // fn foo() { - // if true {} - // loop {} - // match () {} - // while true {} - // for _ in () {} - // {} - // {} - // macro_rules! test { - // () => {} - // } - // test!{} - // } - let m = cm.precede(p); - match semicolon { - Semicolon::Required => { - if blocklike.is_block() { - p.eat(T![;]); - } else { - p.expect(T![;]); - } - } - Semicolon::Optional => { + if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) + && !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) + { + // test no_semi_after_block + // fn foo() { + // if true {} + // loop {} + // match () {} + // while true {} + // for _ in () {} + // {} + // {} + // macro_rules! test { + // () => {} + // } + // test!{} + // } + let m = cm.precede(p); + match semicolon { + Semicolon::Required => { + if blocklike.is_block() { p.eat(T![;]); + } else { + p.expect(T![;]); } - Semicolon::Forbidden => (), } - m.complete(p, EXPR_STMT); + Semicolon::Optional => { + p.eat(T![;]); + } + Semicolon::Forbidden => (), } + m.complete(p, EXPR_STMT); } } @@ -134,14 +134,11 @@ pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) { if p.at(T![else]) { // test_err let_else_right_curly_brace // fn func() { let Some(_) = {Some(1)} else { panic!("h") };} - if let Some(expr) = expr_after_eq { - if let Some(token) = expr.last_token(p) { - if token == T!['}'] { - p.error( - "right curly brace `}` before `else` in a `let...else` statement not allowed" - ) - } - } + if let Some(expr) = expr_after_eq + && let Some(token) = expr.last_token(p) + && token == T!['}'] + { + p.error("right curly brace `}` before `else` in a `let...else` statement not allowed") } // test let_else diff --git a/src/tools/rust-analyzer/crates/parser/src/input.rs b/src/tools/rust-analyzer/crates/parser/src/input.rs index 4490956f97046..331bc58dd0523 100644 --- a/src/tools/rust-analyzer/crates/parser/src/input.rs +++ b/src/tools/rust-analyzer/crates/parser/src/input.rs @@ -61,7 +61,7 @@ impl Input { #[inline] fn push_impl(&mut self, kind: SyntaxKind, contextual_kind: SyntaxKind) { let idx = self.len(); - if idx % (bits::BITS as usize) == 0 { + if idx.is_multiple_of(bits::BITS as usize) { self.joint.push(0); } self.kind.push(kind); diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs index e2baec890c3a6..d5e513933f7a0 100644 --- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs +++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs @@ -252,10 +252,10 @@ fn n_attached_trivias<'a>( WHITESPACE if text.contains("\n\n") => { // we check whether the next token is a doc-comment // and skip the whitespace in this case - if let Some((COMMENT, peek_text)) = trivias.peek().map(|(_, pair)| pair) { - if is_outer(peek_text) { - continue; - } + if let Some((COMMENT, peek_text)) = trivias.peek().map(|(_, pair)| pair) + && is_outer(peek_text) + { + continue; } break; } diff --git 
a/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs b/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs index ec4b6b2a4ac3c..277cc0b269d71 100644 --- a/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs +++ b/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs @@ -278,15 +278,15 @@ pub(crate) fn query_group_impl( return Err(syn::Error::new(signature.span(), "Queries must have a return type")); }; - if let syn::Type::Path(ref ty_path) = *return_ty { - if matches!(query_kind, QueryKind::Input) { - let field = InputStructField { - name: method_name.to_token_stream(), - ty: ty_path.path.to_token_stream(), - }; - - input_struct_fields.push(field); - } + if let syn::Type::Path(ref ty_path) = *return_ty + && matches!(query_kind, QueryKind::Input) + { + let field = InputStructField { + name: method_name.to_token_stream(), + ty: ty_path.path.to_token_stream(), + }; + + input_struct_fields.push(field); } if let Some(block) = &mut method.default { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 4dba97c8ec492..ab045e0bf9ff1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -122,12 +122,12 @@ fn setup_logging(log_file_flag: Option) -> anyhow::Result<()> { // directory which we set to the project workspace. // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/general-environment-variables // https://docs.microsoft.com/en-us/windows/win32/api/dbghelp/nf-dbghelp-syminitialize - if let Ok(path) = env::current_exe() { - if let Some(path) = path.parent() { - // SAFETY: This is safe because this is single-threaded. - unsafe { - env::set_var("_NT_SYMBOL_PATH", path); - } + if let Ok(path) = env::current_exe() + && let Some(path) = path.parent() + { + // SAFETY: This is safe because this is single-threaded. 
+ unsafe { + env::set_var("_NT_SYMBOL_PATH", path); } } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 4f75d14834c64..97886844a9f9e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -136,34 +136,30 @@ impl flags::AnalysisStats { for source_root_id in source_roots { let source_root = db.source_root(source_root_id).source_root(db); for file_id in source_root.iter() { - if let Some(p) = source_root.path_for_file(&file_id) { - if let Some((_, Some("rs"))) = p.name_and_extension() { - // measure workspace/project code - if !source_root.is_library || self.with_deps { - let length = db.file_text(file_id).text(db).lines().count(); - let item_stats = db - .file_item_tree( - EditionedFileId::current_edition(db, file_id).into(), - ) - .item_tree_stats() - .into(); - - workspace_loc += length; - workspace_item_trees += 1; - workspace_item_stats += item_stats; - } else { - let length = db.file_text(file_id).text(db).lines().count(); - let item_stats = db - .file_item_tree( - EditionedFileId::current_edition(db, file_id).into(), - ) - .item_tree_stats() - .into(); - - dep_loc += length; - dep_item_trees += 1; - dep_item_stats += item_stats; - } + if let Some(p) = source_root.path_for_file(&file_id) + && let Some((_, Some("rs"))) = p.name_and_extension() + { + // measure workspace/project code + if !source_root.is_library || self.with_deps { + let length = db.file_text(file_id).text(db).lines().count(); + let item_stats = db + .file_item_tree(EditionedFileId::current_edition(db, file_id).into()) + .item_tree_stats() + .into(); + + workspace_loc += length; + workspace_item_trees += 1; + workspace_item_stats += item_stats; + } else { + let length = db.file_text(file_id).text(db).lines().count(); + let item_stats = db + .file_item_tree(EditionedFileId::current_edition(db, file_id).into()) + .item_tree_stats() + .into(); + + dep_loc += length; + dep_item_trees += 1; + dep_item_stats += item_stats; } } } @@ -560,29 +556,35 @@ impl flags::AnalysisStats { std::fs::write(path, txt).unwrap(); let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap(); - if let Some(err) = res.error() { - if err.contains("error: could not compile") { - if let Some(mut err_idx) = err.find("error[E") { - err_idx += 7; - let err_code = &err[err_idx..err_idx + 4]; - match err_code { - "0282" | "0283" => continue, // Byproduct of testing method - "0277" | "0308" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882 - // FIXME: In some rare cases `AssocItem::container_or_implemented_trait` returns `None` for trait methods. - // Generated code is valid in case traits are imported - "0599" if err.contains("the following trait is implemented but not in scope") => continue, - _ => (), + if let Some(err) = res.error() + && err.contains("error: could not compile") + { + if let Some(mut err_idx) = err.find("error[E") { + err_idx += 7; + let err_code = &err[err_idx..err_idx + 4]; + match err_code { + "0282" | "0283" => continue, // Byproduct of testing method + "0277" | "0308" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882 + // FIXME: In some rare cases `AssocItem::container_or_implemented_trait` returns `None` for trait methods. 
+ // Generated code is valid in case traits are imported + "0599" + if err.contains( + "the following trait is implemented but not in scope", + ) => + { + continue; } - bar.println(err); - bar.println(generated); - acc.error_codes - .entry(err_code.to_owned()) - .and_modify(|n| *n += 1) - .or_insert(1); - } else { - acc.syntax_errors += 1; - bar.println(format!("Syntax error: \n{err}")); + _ => (), } + bar.println(err); + bar.println(generated); + acc.error_codes + .entry(err_code.to_owned()) + .and_modify(|n| *n += 1) + .or_insert(1); + } else { + acc.syntax_errors += 1; + bar.println(format!("Syntax error: \n{err}")); } } } @@ -731,12 +733,11 @@ impl flags::AnalysisStats { let name = body_id.name(db).unwrap_or_else(Name::missing); let module = body_id.module(db); let display_target = module.krate().to_display_target(db); - if let Some(only_name) = self.only.as_deref() { - if name.display(db, Edition::LATEST).to_string() != only_name - && full_name(db, body_id, module) != only_name - { - continue; - } + if let Some(only_name) = self.only.as_deref() + && name.display(db, Edition::LATEST).to_string() != only_name + && full_name(db, body_id, module) != only_name + { + continue; } let msg = move || { if verbosity.is_verbose() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs index 1b9b870a7c74c..028311388c561 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs @@ -83,11 +83,11 @@ impl<'a> ProgressReport<'a> { output.extend(text.chars().skip(common_prefix_length)); // If the new text is shorter than the old one: delete overlapping characters - if let Some(overlap_count) = self.text.len().checked_sub(text.len()) { - if overlap_count > 0 { - output += &" ".repeat(overlap_count); - output += &"\x08".repeat(overlap_count); - } + if let Some(overlap_count) = self.text.len().checked_sub(text.len()) + && overlap_count > 0 + { + output += &" ".repeat(overlap_count); + output += &"\x08".repeat(overlap_count); } let _ = io::stdout().write(output.as_bytes()); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 30ac93fb6f838..36ae98b321b84 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -305,10 +305,10 @@ impl flags::RustcTests { for i in walk_dir { let i = i?; let p = i.into_path(); - if let Some(f) = &self.filter { - if !p.as_os_str().to_string_lossy().contains(f) { - continue; - } + if let Some(f) = &self.filter + && !p.as_os_str().to_string_lossy().contains(f) + { + continue; } if p.extension().is_none_or(|x| x != "rs") { continue; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index a8bcce196c4ec..70d04485ca08c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -3904,17 +3904,16 @@ mod tests { for idx in url_offsets { let link = &schema[idx..]; // matching on whitespace to ignore normal links - if let Some(link_end) = link.find([' ', '[']) { - if link.chars().nth(link_end) == Some('[') { - if let Some(link_text_end) = link.find(']') { - let link_text = link[link_end..(link_text_end + 1)].to_string(); 
- - schema.replace_range((idx + link_end)..(idx + link_text_end + 1), ""); - schema.insert(idx, '('); - schema.insert(idx + link_end + 1, ')'); - schema.insert_str(idx, &link_text); - } - } + if let Some(link_end) = link.find([' ', '[']) + && link.chars().nth(link_end) == Some('[') + && let Some(link_text_end) = link.find(']') + { + let link_text = link[link_end..(link_text_end + 1)].to_string(); + + schema.replace_range((idx + link_end)..(idx + link_text_end + 1), ""); + schema.insert(idx, '('); + schema.insert(idx + link_end + 1, ')'); + schema.insert_str(idx, &link_text); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs index 95857dd8f3b4b..389bb7848c01c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs @@ -73,19 +73,19 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { } // completion.snippets -> completion.snippets.custom; - if let Some(Value::Object(obj)) = copy.pointer("/completion/snippets").cloned() { - if obj.len() != 1 || obj.get("custom").is_none() { - merge( - json, - json! {{ - "completion": { - "snippets": { - "custom": obj - }, + if let Some(Value::Object(obj)) = copy.pointer("/completion/snippets").cloned() + && (obj.len() != 1 || obj.get("custom").is_none()) + { + merge( + json, + json! {{ + "completion": { + "snippets": { + "custom": obj }, - }}, - ); - } + }, + }}, + ); } // callInfo_full -> signatureInfo_detail, signatureInfo_documentation_enable diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs index 79d8f678de4d6..3f64628de8606 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -298,10 +298,10 @@ pub(crate) fn map_rust_diagnostic_to_lsp( let mut source = String::from("rustc"); let mut code = rd.code.as_ref().map(|c| c.code.clone()); - if let Some(code_val) = &code { - if config.check_ignore.contains(code_val) { - return Vec::new(); - } + if let Some(code_val) = &code + && config.check_ignore.contains(code_val) + { + return Vec::new(); } if let Some(code_val) = &code { @@ -373,10 +373,8 @@ pub(crate) fn map_rust_diagnostic_to_lsp( let primary_location = primary_location(config, workspace_root, primary_span, snap); let message = { let mut message = message.clone(); - if needs_primary_span_label { - if let Some(primary_span_label) = &primary_span.label { - format_to!(message, "\n{}", primary_span_label); - } + if needs_primary_span_label && let Some(primary_span_label) = &primary_span.label { + format_to!(message, "\n{}", primary_span_label); } message }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 512ce0b9de354..e4e0bcdc1cd08 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -492,12 +492,11 @@ impl FlycheckActor { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { let mut cmd = toolchain::command(Tool::Cargo.path(), &*self.root, &options.extra_env); - if let Some(sysroot_root) = &self.sysroot_root { - if !options.extra_env.contains_key("RUSTUP_TOOLCHAIN") - && 
std::env::var_os("RUSTUP_TOOLCHAIN").is_none() - { - cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); - } + if let Some(sysroot_root) = &self.sysroot_root + && !options.extra_env.contains_key("RUSTUP_TOOLCHAIN") + && std::env::var_os("RUSTUP_TOOLCHAIN").is_none() + { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); } cmd.arg(command); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 62a28a1a685d4..3171bdd361785 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -591,10 +591,10 @@ impl GlobalState { pub(crate) fn respond(&mut self, response: lsp_server::Response) { if let Some((method, start)) = self.req_queue.incoming.complete(&response.id) { - if let Some(err) = &response.error { - if err.message.starts_with("server panicked") { - self.poke_rust_analyzer_developer(format!("{}, check the log", err.message)); - } + if let Some(err) = &response.error + && err.message.starts_with("server panicked") + { + self.poke_rust_analyzer_developer(format!("{}, check the log", err.message)); } let duration = start.elapsed(); @@ -663,18 +663,18 @@ impl GlobalState { pub(crate) fn check_workspaces_msrv(&self) -> impl Iterator + '_ { self.workspaces.iter().filter_map(|ws| { - if let Some(toolchain) = &ws.toolchain { - if *toolchain < crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION { - return Some(format!( - "Workspace `{}` is using an outdated toolchain version `{}` but \ + if let Some(toolchain) = &ws.toolchain + && *toolchain < crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION + { + return Some(format!( + "Workspace `{}` is using an outdated toolchain version `{}` but \ rust-analyzer only supports `{}` and higher.\n\ Consider using the rust-analyzer rustup component for your toolchain or upgrade your toolchain to a supported version.\n\n", - ws.manifest_or_root(), - toolchain, - crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION, - )); - } + ws.manifest_or_root(), + toolchain, + crate::MINIMUM_SUPPORTED_TOOLCHAIN_VERSION, + )); } None }) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index aea116e647db8..b25245dd884a4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -433,10 +433,10 @@ impl NotificationDispatcher<'_> { } pub(crate) fn finish(&mut self) { - if let Some(not) = &self.not { - if !not.method.starts_with("$/") { - tracing::error!("unhandled notification: {:?}", not); - } + if let Some(not) = &self.not + && !not.method.starts_with("$/") + { + tracing::error!("unhandled notification: {:?}", not); } } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 200e972e42897..e193ff77743d1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -39,14 +39,12 @@ pub(crate) fn handle_work_done_progress_cancel( state: &mut GlobalState, params: WorkDoneProgressCancelParams, ) -> anyhow::Result<()> { - if let lsp_types::NumberOrString::String(s) = ¶ms.token { - if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") { - if let Ok(id) = id.parse::() { - if let 
Some(flycheck) = state.flycheck.get(id as usize) { - flycheck.cancel(); - } - } - } + if let lsp_types::NumberOrString::String(s) = ¶ms.token + && let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") + && let Ok(id) = id.parse::() + && let Some(flycheck) = state.flycheck.get(id as usize) + { + flycheck.cancel(); } // Just ignore this. It is OK to continue sending progress @@ -76,12 +74,12 @@ pub(crate) fn handle_did_open_text_document( tracing::error!("duplicate DidOpenTextDocument: {}", path); } - if let Some(abs_path) = path.as_path() { - if state.config.excluded().any(|excluded| abs_path.starts_with(&excluded)) { - tracing::trace!("opened excluded file {abs_path}"); - state.vfs.write().0.insert_excluded_file(path); - return Ok(()); - } + if let Some(abs_path) = path.as_path() + && state.config.excluded().any(|excluded| abs_path.starts_with(&excluded)) + { + tracing::trace!("opened excluded file {abs_path}"); + state.vfs.write().0.insert_excluded_file(path); + return Ok(()); } let contents = params.text_document.text.into_bytes(); @@ -449,12 +447,11 @@ pub(crate) fn handle_run_flycheck( params: RunFlycheckParams, ) -> anyhow::Result<()> { let _p = tracing::info_span!("handle_run_flycheck").entered(); - if let Some(text_document) = params.text_document { - if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) { - if run_flycheck(state, vfs_path) { - return Ok(()); - } - } + if let Some(text_document) = params.text_document + && let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) + && run_flycheck(state, vfs_path) + { + return Ok(()); } // No specific flycheck was triggered, so let's trigger all of them. if state.config.flycheck_workspace(None) { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index a76a65220d3b0..25c0aac405e79 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -973,14 +973,13 @@ pub(crate) fn handle_runnables( res.push(runnable); } - if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args { - if let Some(TargetSpec::Cargo(CargoTargetSpec { + if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args + && let Some(TargetSpec::Cargo(CargoTargetSpec { sysroot_root: Some(sysroot_root), .. 
})) = &target_spec - { - r.environment.insert("RUSTC_TOOLCHAIN".to_owned(), sysroot_root.to_string()); - } + { + r.environment.insert("RUSTC_TOOLCHAIN".to_owned(), sysroot_root.to_string()); }; res.push(runnable); @@ -1034,25 +1033,25 @@ pub(crate) fn handle_runnables( } Some(TargetSpec::ProjectJson(_)) => {} None => { - if !snap.config.linked_or_discovered_projects().is_empty() { - if let Some(path) = snap.file_id_to_file_path(file_id).parent() { - let mut cargo_args = vec!["check".to_owned(), "--workspace".to_owned()]; - cargo_args.extend(config.cargo_extra_args.iter().cloned()); - res.push(lsp_ext::Runnable { - label: "cargo check --workspace".to_owned(), - location: None, - kind: lsp_ext::RunnableKind::Cargo, - args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { - workspace_root: None, - cwd: path.as_path().unwrap().to_path_buf().into(), - override_cargo: config.override_cargo, - cargo_args, - executable_args: Vec::new(), - environment: Default::default(), - }), - }); - }; - } + if !snap.config.linked_or_discovered_projects().is_empty() + && let Some(path) = snap.file_id_to_file_path(file_id).parent() + { + let mut cargo_args = vec!["check".to_owned(), "--workspace".to_owned()]; + cargo_args.extend(config.cargo_extra_args.iter().cloned()); + res.push(lsp_ext::Runnable { + label: "cargo check --workspace".to_owned(), + location: None, + kind: lsp_ext::RunnableKind::Cargo, + args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { + workspace_root: None, + cwd: path.as_path().unwrap().to_path_buf().into(), + override_cargo: config.override_cargo, + cargo_args, + executable_args: Vec::new(), + environment: Default::default(), + }), + }); + }; } } Ok(res) @@ -1557,12 +1556,12 @@ pub(crate) fn handle_code_action_resolve( code_action.edit = ca.edit; code_action.command = ca.command; - if let Some(edit) = code_action.edit.as_ref() { - if let Some(changes) = edit.document_changes.as_ref() { - for change in changes { - if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change { - resource_ops_supported(&snap.config, resolve_resource_op(res_op))? - } + if let Some(edit) = code_action.edit.as_ref() + && let Some(changes) = edit.document_changes.as_ref() + { + for change in changes { + if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change { + resource_ops_supported(&snap.config, resolve_resource_op(res_op))? } } } @@ -1958,12 +1957,11 @@ pub(crate) fn handle_semantic_tokens_full_delta( if let Some(cached_tokens @ lsp_types::SemanticTokens { result_id: Some(prev_id), .. 
}) = &cached_tokens + && *prev_id == params.previous_result_id { - if *prev_id == params.previous_result_id { - let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens); - snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens); - return Ok(Some(delta.into())); - } + let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens); + snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens); + return Ok(Some(delta.into())); } // Clone first to keep the lock short @@ -2122,24 +2120,25 @@ fn show_impl_command_link( snap: &GlobalStateSnapshot, position: &FilePosition, ) -> Option { - if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference { - if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) { - let uri = to_proto::url(snap, position.file_id); - let line_index = snap.file_line_index(position.file_id).ok()?; - let position = to_proto::position(&line_index, position.offset); - let locations: Vec<_> = nav_data - .info - .into_iter() - .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok()) - .collect(); - let title = to_proto::implementation_title(locations.len()); - let command = to_proto::command::show_references(title, &uri, position, locations); - - return Some(lsp_ext::CommandLinkGroup { - commands: vec![to_command_link(command, "Go to implementations".into())], - ..Default::default() - }); - } + if snap.config.hover_actions().implementations + && snap.config.client_commands().show_reference + && let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) + { + let uri = to_proto::url(snap, position.file_id); + let line_index = snap.file_line_index(position.file_id).ok()?; + let position = to_proto::position(&line_index, position.offset); + let locations: Vec<_> = nav_data + .info + .into_iter() + .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok()) + .collect(); + let title = to_proto::implementation_title(locations.len()); + let command = to_proto::command::show_references(title, &uri, position, locations); + + return Some(lsp_ext::CommandLinkGroup { + commands: vec![to_command_link(command, "Go to implementations".into())], + ..Default::default() + }); } None } @@ -2148,28 +2147,29 @@ fn show_ref_command_link( snap: &GlobalStateSnapshot, position: &FilePosition, ) -> Option { - if snap.config.hover_actions().references && snap.config.client_commands().show_reference { - if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) { - let uri = to_proto::url(snap, position.file_id); - let line_index = snap.file_line_index(position.file_id).ok()?; - let position = to_proto::position(&line_index, position.offset); - let locations: Vec<_> = ref_search_res - .into_iter() - .flat_map(|res| res.references) - .flat_map(|(file_id, ranges)| { - ranges.into_iter().map(move |(range, _)| FileRange { file_id, range }) - }) - .unique() - .filter_map(|range| to_proto::location(snap, range).ok()) - .collect(); - let title = to_proto::reference_title(locations.len()); - let command = to_proto::command::show_references(title, &uri, position, locations); - - return Some(lsp_ext::CommandLinkGroup { - commands: vec![to_command_link(command, "Go to references".into())], - ..Default::default() - }); - } + if snap.config.hover_actions().references + && snap.config.client_commands().show_reference + && let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) + 
{ + let uri = to_proto::url(snap, position.file_id); + let line_index = snap.file_line_index(position.file_id).ok()?; + let position = to_proto::position(&line_index, position.offset); + let locations: Vec<_> = ref_search_res + .into_iter() + .flat_map(|res| res.references) + .flat_map(|(file_id, ranges)| { + ranges.into_iter().map(move |(range, _)| FileRange { file_id, range }) + }) + .unique() + .filter_map(|range| to_proto::location(snap, range).ok()) + .collect(); + let title = to_proto::reference_title(locations.len()); + let command = to_proto::command::show_references(title, &uri, position, locations); + + return Some(lsp_ext::CommandLinkGroup { + commands: vec![to_command_link(command, "Go to references".into())], + ..Default::default() + }); } None } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 00cf890510d45..61c758d5e86e1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -501,14 +501,12 @@ impl GlobalState { } } - if self.config.cargo_autoreload_config(None) - || self.config.discover_workspace_config().is_some() - { - if let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = + if (self.config.cargo_autoreload_config(None) + || self.config.discover_workspace_config().is_some()) + && let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = self.fetch_workspaces_queue.should_start_op() - { - self.fetch_workspaces(cause, path, force_crate_graph_reload); - } + { + self.fetch_workspaces(cause, path, force_crate_graph_reload); } if !self.fetch_workspaces_queue.op_in_progress() { @@ -765,33 +763,33 @@ impl GlobalState { self.report_progress("Fetching", state, msg, None, None); } Task::DiscoverLinkedProjects(arg) => { - if let Some(cfg) = self.config.discover_workspace_config() { - if !self.discover_workspace_queue.op_in_progress() { - // the clone is unfortunately necessary to avoid a borrowck error when - // `self.report_progress` is called later - let title = &cfg.progress_label.clone(); - let command = cfg.command.clone(); - let discover = DiscoverCommand::new(self.discover_sender.clone(), command); - - self.report_progress(title, Progress::Begin, None, None, None); - self.discover_workspace_queue - .request_op("Discovering workspace".to_owned(), ()); - let _ = self.discover_workspace_queue.should_start_op(); - - let arg = match arg { - DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it), - DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it), - }; + if let Some(cfg) = self.config.discover_workspace_config() + && !self.discover_workspace_queue.op_in_progress() + { + // the clone is unfortunately necessary to avoid a borrowck error when + // `self.report_progress` is called later + let title = &cfg.progress_label.clone(); + let command = cfg.command.clone(); + let discover = DiscoverCommand::new(self.discover_sender.clone(), command); + + self.report_progress(title, Progress::Begin, None, None, None); + self.discover_workspace_queue + .request_op("Discovering workspace".to_owned(), ()); + let _ = self.discover_workspace_queue.should_start_op(); + + let arg = match arg { + DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it), + DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it), + }; - let handle = discover.spawn( - arg, - &std::env::current_dir() - .expect("Failed to get cwd during project 
discovery"), - ); - self.discover_handle = Some(handle.unwrap_or_else(|e| { - panic!("Failed to spawn project discovery command: {e}") - })); - } + let handle = discover.spawn( + arg, + &std::env::current_dir() + .expect("Failed to get cwd during project discovery"), + ); + self.discover_handle = Some(handle.unwrap_or_else(|e| { + panic!("Failed to spawn project discovery command: {e}") + })); } } Task::FetchBuildData(progress) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index e798aa6a8a606..aa38aa72d44eb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -306,13 +306,13 @@ impl GlobalState { _ => None, }); - if let Some(build) = build { - if is_quiescent { - let path = AbsPathBuf::try_from(build.build_file) - .expect("Unable to convert to an AbsPath"); - let arg = DiscoverProjectParam::Buildfile(path); - sender.send(Task::DiscoverLinkedProjects(arg)).unwrap(); - } + if let Some(build) = build + && is_quiescent + { + let path = AbsPathBuf::try_from(build.build_file) + .expect("Unable to convert to an AbsPath"); + let arg = DiscoverProjectParam::Buildfile(path); + sender.send(Task::DiscoverLinkedProjects(arg)).unwrap(); } } diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs index f58201793da28..bb09933536e71 100644 --- a/src/tools/rust-analyzer/crates/span/src/map.rs +++ b/src/tools/rust-analyzer/crates/span/src/map.rs @@ -41,13 +41,13 @@ where /// Pushes a new span onto the [`SpanMap`]. pub fn push(&mut self, offset: TextSize, span: SpanData) { - if cfg!(debug_assertions) { - if let Some(&(last_offset, _)) = self.spans.last() { - assert!( - last_offset < offset, - "last_offset({last_offset:?}) must be smaller than offset({offset:?})" - ); - } + if cfg!(debug_assertions) + && let Some(&(last_offset, _)) = self.spans.last() + { + assert!( + last_offset < offset, + "last_offset({last_offset:?}) must be smaller than offset({offset:?})" + ); } self.spans.push((offset, span)); } diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs index d59229952f527..bdff671802c2a 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs @@ -768,17 +768,17 @@ where } fn bump(&mut self) -> Option<(Self::Token, TextRange)> { - if let Some((punct, offset)) = self.punct_offset.clone() { - if usize::from(offset) + 1 < punct.text().len() { - let offset = offset + TextSize::of('.'); - let range = punct.text_range(); - self.punct_offset = Some((punct.clone(), offset)); - let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some(( - SynToken::Punct { token: punct, offset: u32::from(offset) as usize }, - range, - )); - } + if let Some((punct, offset)) = self.punct_offset.clone() + && usize::from(offset) + 1 < punct.text().len() + { + let offset = offset + TextSize::of('.'); + let range = punct.text_range(); + self.punct_offset = Some((punct.clone(), offset)); + let range = TextRange::at(range.start() + offset, TextSize::of('.')); + return Some(( + SynToken::Punct { token: punct, offset: u32::from(offset) as usize }, + range, + )); } if let Some(leaf) = self.current_leaves.pop() { diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs 
b/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs index 0a5c8df0d0aef..2f932e0458324 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs @@ -61,10 +61,11 @@ pub fn prettify_macro_expansion( } _ => continue, }; - if token.kind() == SyntaxKind::IDENT && token.text() == "$crate" { - if let Some(replacement) = dollar_crate_replacement(&token) { - dollar_crate_replacements.push((token.clone(), replacement)); - } + if token.kind() == SyntaxKind::IDENT + && token.text() == "$crate" + && let Some(replacement) = dollar_crate_replacement(&token) + { + dollar_crate_replacements.push((token.clone(), replacement)); } let tok = &token; diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs index 8871bf56a5df7..c8dc3131b59c6 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs @@ -34,14 +34,11 @@ fn check_punct_spacing(fixture: &str) { while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { if let tt::TokenTree::Leaf(Leaf::Punct(Punct { - spacing, - span: Span { range, .. }, - .. + spacing, span: Span { range, .. }, .. })) = token_tree + && let Some(expected) = annotations.remove(range) { - if let Some(expected) = annotations.remove(range) { - assert_eq!(expected, *spacing); - } + assert_eq!(expected, *spacing); } cursor.bump(); } diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs index 021dc6595f9b9..c0ff8e1db2c2d 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs @@ -21,17 +21,17 @@ pub fn to_parser_input( let tt = current.token_tree(); // Check if it is lifetime - if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tt { - if punct.char == '\'' { - current.bump(); - match current.token_tree() { - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(_ident))) => { - res.push(LIFETIME_IDENT); - current.bump(); - continue; - } - _ => panic!("Next token must be ident"), + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tt + && punct.char == '\'' + { + current.bump(); + match current.token_tree() { + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(_ident))) => { + res.push(LIFETIME_IDENT); + current.bump(); + continue; } + _ => panic!("Next token must be ident"), } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs index d97fdec524fbb..9b30642fe4b08 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs @@ -89,11 +89,11 @@ impl IndentLevel { _ => None, }); for token in tokens { - if let Some(ws) = ast::Whitespace::cast(token) { - if ws.text().contains('\n') { - let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax())); - ted::replace(ws.syntax(), &new_ws); - } + if let Some(ws) = ast::Whitespace::cast(token) + && ws.text().contains('\n') + { + let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax())); + ted::replace(ws.syntax(), &new_ws); } } } @@ -122,13 +122,13 @@ impl IndentLevel { _ => None, }); for token in tokens { - if let Some(ws) = ast::Whitespace::cast(token) { - if ws.text().contains('\n') { - let new_ws = 
make::tokens::whitespace( - &ws.syntax().text().replace(&format!("\n{self}"), "\n"), - ); - ted::replace(ws.syntax(), &new_ws); - } + if let Some(ws) = ast::Whitespace::cast(token) + && ws.text().contains('\n') + { + let new_ws = make::tokens::whitespace( + &ws.syntax().text().replace(&format!("\n{self}"), "\n"), + ); + ted::replace(ws.syntax(), &new_ws); } } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index 28b543ea70644..f01ac081c8bdd 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -383,10 +383,10 @@ impl ast::GenericParamList { impl ast::WhereClause { pub fn add_predicate(&self, predicate: ast::WherePred) { - if let Some(pred) = self.predicates().last() { - if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) { - ted::append_child_raw(self.syntax(), make::token(T![,])); - } + if let Some(pred) = self.predicates().last() + && !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) + { + ted::append_child_raw(self.syntax(), make::token(T![,])); } ted::append_child(self.syntax(), predicate.syntax()); } @@ -744,10 +744,10 @@ impl ast::LetStmt { } if let Some(existing_ty) = self.ty() { - if let Some(sibling) = existing_ty.syntax().prev_sibling_or_token() { - if sibling.kind() == SyntaxKind::WHITESPACE { - ted::remove(sibling); - } + if let Some(sibling) = existing_ty.syntax().prev_sibling_or_token() + && sibling.kind() == SyntaxKind::WHITESPACE + { + ted::remove(sibling); } ted::remove(existing_ty.syntax()); @@ -823,19 +823,18 @@ impl ast::RecordExprField { return; } // this is a shorthand - if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() { - if let Some(path) = path_expr.path() { - if let Some(name_ref) = path.as_single_name_ref() { - path_expr.syntax().detach(); - let children = vec![ - name_ref.syntax().clone().into(), - ast::make::token(T![:]).into(), - ast::make::tokens::single_space().into(), - expr.syntax().clone().into(), - ]; - ted::insert_all_raw(Position::last_child_of(self.syntax()), children); - } - } + if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() + && let Some(path) = path_expr.path() + && let Some(name_ref) = path.as_single_name_ref() + { + path_expr.syntax().detach(); + let children = vec![ + name_ref.syntax().clone().into(), + ast::make::token(T![:]).into(), + ast::make::tokens::single_space().into(), + expr.syntax().clone().into(), + ]; + ted::insert_all_raw(Position::last_child_of(self.syntax()), children); } } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs index 00750bff0ba20..1364adb187fcc 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs @@ -276,19 +276,19 @@ impl Expr { } // Not every expression can be followed by `else` in the `let-else` - if let Some(ast::Stmt::LetStmt(e)) = stmt { - if e.let_else().is_some() { - match self { - BinExpr(e) - if e.op_kind() - .map(|op| matches!(op, BinaryOp::LogicOp(_))) - .unwrap_or(false) => - { - return true; - } - _ if self.clone().trailing_brace().is_some() => return true, - _ => {} + if let Some(ast::Stmt::LetStmt(e)) = stmt + && e.let_else().is_some() + { + match self { + BinExpr(e) + if e.op_kind() + .map(|op| matches!(op, BinaryOp::LogicOp(_))) + .unwrap_or(false) => + { + return true; } + _ if 
self.clone().trailing_brace().is_some() => return true, + _ => {} } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs index 5107754b18257..124ac5c072c91 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs @@ -626,10 +626,10 @@ mod tests { if let Some(ret_ty) = parent_fn.ret_type() { editor.delete(ret_ty.syntax().clone()); - if let Some(SyntaxElement::Token(token)) = ret_ty.syntax().next_sibling_or_token() { - if token.kind().is_trivia() { - editor.delete(token); - } + if let Some(SyntaxElement::Token(token)) = ret_ty.syntax().next_sibling_or_token() + && token.kind().is_trivia() + { + editor.delete(token); } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ted.rs b/src/tools/rust-analyzer/crates/syntax/src/ted.rs index 6fcbdd006c244..5c286479c4e3d 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ted.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ted.rs @@ -90,15 +90,15 @@ pub fn insert_raw(position: Position, elem: impl Element) { insert_all_raw(position, vec![elem.syntax_element()]); } pub fn insert_all(position: Position, mut elements: Vec) { - if let Some(first) = elements.first() { - if let Some(ws) = ws_before(&position, first) { - elements.insert(0, ws.into()); - } + if let Some(first) = elements.first() + && let Some(ws) = ws_before(&position, first) + { + elements.insert(0, ws.into()); } - if let Some(last) = elements.last() { - if let Some(ws) = ws_after(&position, last) { - elements.push(ws.into()); - } + if let Some(last) = elements.last() + && let Some(ws) = ws_after(&position, last) + { + elements.push(ws.into()); } insert_all_raw(position, elements); } @@ -165,20 +165,22 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option { PositionRepr::After(it) => it, }; - if prev.kind() == T!['{'] && new.kind() == SyntaxKind::USE { - if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) { - let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into()); - indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{indent}"))); - } + if prev.kind() == T!['{'] + && new.kind() == SyntaxKind::USE + && let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) + { + let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into()); + indent.0 += 1; + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } - if prev.kind() == T!['{'] && ast::Stmt::can_cast(new.kind()) { - if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) { - let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into()); - indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{indent}"))); - } + if prev.kind() == T!['{'] + && ast::Stmt::can_cast(new.kind()) + && let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) + { + let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into()); + indent.0 += 1; + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } ws_between(prev, new) diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs index 4180f9cd18550..485140be8f69c 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/validation.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs @@ -142,50 +142,50 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec) { match literal.kind() { 
ast::LiteralKind::String(s) => { - if !s.is_raw() { - if let Some(without_quotes) = unquote(text, 1, '"') { - unescape_str(without_quotes, |range, char| { - if let Err(err) = char { - push_err(1, range.start, err); - } - }); - } + if !s.is_raw() + && let Some(without_quotes) = unquote(text, 1, '"') + { + unescape_str(without_quotes, |range, char| { + if let Err(err) = char { + push_err(1, range.start, err); + } + }); } } ast::LiteralKind::ByteString(s) => { - if !s.is_raw() { - if let Some(without_quotes) = unquote(text, 2, '"') { - unescape_byte_str(without_quotes, |range, char| { - if let Err(err) = char { - push_err(1, range.start, err); - } - }); - } + if !s.is_raw() + && let Some(without_quotes) = unquote(text, 2, '"') + { + unescape_byte_str(without_quotes, |range, char| { + if let Err(err) = char { + push_err(1, range.start, err); + } + }); } } ast::LiteralKind::CString(s) => { - if !s.is_raw() { - if let Some(without_quotes) = unquote(text, 2, '"') { - unescape_c_str(without_quotes, |range, char| { - if let Err(err) = char { - push_err(1, range.start, err); - } - }); - } + if !s.is_raw() + && let Some(without_quotes) = unquote(text, 2, '"') + { + unescape_c_str(without_quotes, |range, char| { + if let Err(err) = char { + push_err(1, range.start, err); + } + }); } } ast::LiteralKind::Char(_) => { - if let Some(without_quotes) = unquote(text, 1, '\'') { - if let Err(err) = unescape_char(without_quotes) { - push_err(1, 0, err); - } + if let Some(without_quotes) = unquote(text, 1, '\'') + && let Err(err) = unescape_char(without_quotes) + { + push_err(1, 0, err); } } ast::LiteralKind::Byte(_) => { - if let Some(without_quotes) = unquote(text, 2, '\'') { - if let Err(err) = unescape_byte(without_quotes) { - push_err(2, 0, err); - } + if let Some(without_quotes) = unquote(text, 2, '\'') + && let Err(err) = unescape_byte(without_quotes) + { + push_err(2, 0, err); } } ast::LiteralKind::IntNumber(_) @@ -224,14 +224,14 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) { } fn validate_numeric_name(name_ref: Option, errors: &mut Vec) { - if let Some(int_token) = int_token(name_ref) { - if int_token.text().chars().any(|c| !c.is_ascii_digit()) { - errors.push(SyntaxError::new( - "Tuple (struct) field access is only allowed through \ + if let Some(int_token) = int_token(name_ref) + && int_token.text().chars().any(|c| !c.is_ascii_digit()) + { + errors.push(SyntaxError::new( + "Tuple (struct) field access is only allowed through \ decimal integers with no underscores or suffix", - int_token.text_range(), - )); - } + int_token.text_range(), + )); } fn int_token(name_ref: Option) -> Option { @@ -285,13 +285,13 @@ fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec Option { diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index 8937e53175abd..4413d2f222c15 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -955,12 +955,12 @@ impl ProcMacroExpander for DisallowCfgProcMacroExpander { _: String, ) -> Result { for tt in subtree.token_trees().flat_tokens() { - if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt { - if ident.sym == sym::cfg || ident.sym == sym::cfg_attr { - return Err(ProcMacroExpansionError::Panic( - "cfg or cfg_attr found in DisallowCfgProcMacroExpander".to_owned(), - )); - } + if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt + && (ident.sym == sym::cfg || ident.sym == sym::cfg_attr) + { + return 
Err(ProcMacroExpansionError::Panic( + "cfg or cfg_attr found in DisallowCfgProcMacroExpander".to_owned(), + )); } } Ok(subtree.clone()) diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs index 44123385c8cc3..243a27b83b0df 100644 --- a/src/tools/rust-analyzer/crates/tt/src/lib.rs +++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs @@ -357,10 +357,10 @@ impl<'a, S: Copy> TokenTreesView<'a, S> { } pub fn try_into_subtree(self) -> Option> { - if let Some(TokenTree::Subtree(subtree)) = self.0.first() { - if subtree.usize_len() == (self.0.len() - 1) { - return Some(SubtreeView::new(self.0)); - } + if let Some(TokenTree::Subtree(subtree)) = self.0.first() + && subtree.usize_len() == (self.0.len() - 1) + { + return Some(SubtreeView::new(self.0)); } None } @@ -1028,10 +1028,10 @@ pub fn pretty(mut tkns: &[TokenTree]) -> String { tkns = rest; last = [last, tokentree_to_text(tkn, &mut tkns)].join(if last_to_joint { "" } else { " " }); last_to_joint = false; - if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn { - if punct.spacing == Spacing::Joint { - last_to_joint = true; - } + if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn + && punct.spacing == Spacing::Joint + { + last_to_joint = true; } } last diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs index a03337dbc51ea..c6393cc6922a2 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs @@ -194,52 +194,49 @@ impl NotifyActor { } }, Event::NotifyEvent(event) => { - if let Some(event) = log_notify_error(event) { - if let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) = + if let Some(event) = log_notify_error(event) + && let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) = event.kind - { - let files = event - .paths - .into_iter() - .filter_map(|path| { - Some( - AbsPathBuf::try_from( - Utf8PathBuf::from_path_buf(path).ok()?, - ) + { + let files = event + .paths + .into_iter() + .filter_map(|path| { + Some( + AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?) 
.expect("path is absolute"), - ) - }) - .filter_map(|path| -> Option<(AbsPathBuf, Option>)> { - let meta = fs::metadata(&path).ok()?; - if meta.file_type().is_dir() - && self - .watched_dir_entries - .iter() - .any(|dir| dir.contains_dir(&path)) - { - self.watch(path.as_ref()); - return None; - } - - if !meta.file_type().is_file() { - return None; - } - - if !(self.watched_file_entries.contains(&path) - || self - .watched_dir_entries - .iter() - .any(|dir| dir.contains_file(&path))) - { - return None; - } - - let contents = read(&path); - Some((path, contents)) - }) - .collect(); - self.send(loader::Message::Changed { files }); - } + ) + }) + .filter_map(|path| -> Option<(AbsPathBuf, Option>)> { + let meta = fs::metadata(&path).ok()?; + if meta.file_type().is_dir() + && self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_dir(&path)) + { + self.watch(path.as_ref()); + return None; + } + + if !meta.file_type().is_file() { + return None; + } + + if !(self.watched_file_entries.contains(&path) + || self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_file(&path))) + { + return None; + } + + let contents = read(&path); + Some((path, contents)) + }) + .collect(); + self.send(loader::Message::Changed { files }); } } } diff --git a/src/tools/rust-analyzer/xtask/src/codegen.rs b/src/tools/rust-analyzer/xtask/src/codegen.rs index 19ca62e8a3290..bc7eb88f3a848 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen.rs @@ -173,11 +173,11 @@ fn add_preamble(cg: CodegenType, mut text: String) -> String { #[allow(clippy::print_stderr)] fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: bool) -> bool { let contents = normalize_newlines(contents); - if let Ok(old_contents) = fs::read_to_string(file) { - if normalize_newlines(&old_contents) == contents { - // File is already up to date. - return false; - } + if let Ok(old_contents) = fs::read_to_string(file) + && normalize_newlines(&old_contents) == contents + { + // File is already up to date. 
+ return false; } let display_path = file.strip_prefix(project_root()).unwrap_or(file); diff --git a/src/tools/rust-analyzer/xtask/src/publish/notes.rs b/src/tools/rust-analyzer/xtask/src/publish/notes.rs index 93592d4986f8a..8d36fcb61b44c 100644 --- a/src/tools/rust-analyzer/xtask/src/publish/notes.rs +++ b/src/tools/rust-analyzer/xtask/src/publish/notes.rs @@ -72,13 +72,13 @@ impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { } fn process_document_title(&mut self) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some((level, title)) = get_title(&line) { - let title = process_inline_macros(title)?; - if level == 1 { - self.write_title(level, &title); - return Ok(()); - } + if let Some(Ok(line)) = self.iter.next() + && let Some((level, title)) = get_title(&line) + { + let title = process_inline_macros(title)?; + if level == 1 { + self.write_title(level, &title); + return Ok(()); } } bail!("document title not found") @@ -141,39 +141,39 @@ impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { } fn process_source_code_block(&mut self, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some(styles) = line.strip_prefix("[source").and_then(|s| s.strip_suffix(']')) { - let mut styles = styles.split(','); - if !styles.next().unwrap().is_empty() { - bail!("not a source code block"); - } - let language = styles.next(); - return self.process_listing_block(language, level); + if let Some(Ok(line)) = self.iter.next() + && let Some(styles) = line.strip_prefix("[source").and_then(|s| s.strip_suffix(']')) + { + let mut styles = styles.split(','); + if !styles.next().unwrap().is_empty() { + bail!("not a source code block"); } + let language = styles.next(); + return self.process_listing_block(language, level); } bail!("not a source code block") } fn process_listing_block(&mut self, style: Option<&str>, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if line == LISTING_DELIMITER { - self.write_indent(level); - self.output.push_str("```"); - if let Some(style) = style { - self.output.push_str(style); - } - self.output.push('\n'); - while let Some(line) = self.iter.next() { - let line = line?; - if line == LISTING_DELIMITER { - self.write_line("```", level); - return Ok(()); - } else { - self.write_line(&line, level); - } + if let Some(Ok(line)) = self.iter.next() + && line == LISTING_DELIMITER + { + self.write_indent(level); + self.output.push_str("```"); + if let Some(style) = style { + self.output.push_str(style); + } + self.output.push('\n'); + while let Some(line) = self.iter.next() { + let line = line?; + if line == LISTING_DELIMITER { + self.write_line("```", level); + return Ok(()); + } else { + self.write_line(&line, level); } - bail!("listing block is not terminated") } + bail!("listing block is not terminated") } bail!("not a listing block") } @@ -200,49 +200,48 @@ impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { } fn process_image_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some((url, attrs)) = parse_media_block(&line, IMAGE_BLOCK_PREFIX) { - let alt = if let Some(stripped) = - attrs.strip_prefix('"').and_then(|s| s.strip_suffix('"')) - { + if let Some(Ok(line)) = self.iter.next() + && let Some((url, attrs)) = parse_media_block(&line, IMAGE_BLOCK_PREFIX) + { + let alt = + if let Some(stripped) = attrs.strip_prefix('"').and_then(|s| s.strip_suffix('"')) { stripped } else { attrs }; - if let Some(caption) = caption { 
- self.write_caption_line(caption, level); - } - self.write_indent(level); - self.output.push_str("!["); - self.output.push_str(alt); - self.output.push_str("]("); - self.output.push_str(url); - self.output.push_str(")\n"); - return Ok(()); + if let Some(caption) = caption { + self.write_caption_line(caption, level); } + self.write_indent(level); + self.output.push_str("!["); + self.output.push_str(alt); + self.output.push_str("]("); + self.output.push_str(url); + self.output.push_str(")\n"); + return Ok(()); } bail!("not a image block") } fn process_video_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { - if let Some(Ok(line)) = self.iter.next() { - if let Some((url, attrs)) = parse_media_block(&line, VIDEO_BLOCK_PREFIX) { - let html_attrs = match attrs { - "options=loop" => "controls loop", - r#"options="autoplay,loop""# => "autoplay controls loop", - _ => bail!("unsupported video syntax"), - }; - if let Some(caption) = caption { - self.write_caption_line(caption, level); - } - self.write_indent(level); - self.output.push_str(r#"\n"); - return Ok(()); + if let Some(Ok(line)) = self.iter.next() + && let Some((url, attrs)) = parse_media_block(&line, VIDEO_BLOCK_PREFIX) + { + let html_attrs = match attrs { + "options=loop" => "controls loop", + r#"options="autoplay,loop""# => "autoplay controls loop", + _ => bail!("unsupported video syntax"), + }; + if let Some(caption) = caption { + self.write_caption_line(caption, level); } + self.write_indent(level); + self.output.push_str(r#"\n"); + return Ok(()); } bail!("not a video block") } @@ -371,12 +370,11 @@ fn strip_prefix_symbol(line: &str, symbol: char) -> Option<(usize, &str)> { } fn parse_media_block<'a>(line: &'a str, prefix: &str) -> Option<(&'a str, &'a str)> { - if let Some(line) = line.strip_prefix(prefix) { - if let Some((url, rest)) = line.split_once('[') { - if let Some(attrs) = rest.strip_suffix(']') { - return Some((url, attrs)); - } - } + if let Some(line) = line.strip_prefix(prefix) + && let Some((url, rest)) = line.split_once('[') + && let Some(attrs) = rest.strip_suffix(']') + { + return Some((url, attrs)); } None } From f638ebcfcea887eff5f3f05b1b2a455176a3d49c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jana=20D=C3=B6nszelmann?= Date: Thu, 31 Jul 2025 11:00:40 +0200 Subject: [PATCH 061/118] remove rustc_attr_data_structures --- src/attributes.rs | 4 ++-- src/callee.rs | 2 +- src/lib.rs | 1 - 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/attributes.rs b/src/attributes.rs index 7a1ae6ca9c8b7..04b43bb8bb7c4 100644 --- a/src/attributes.rs +++ b/src/attributes.rs @@ -2,8 +2,8 @@ use gccjit::FnAttribute; use gccjit::Function; #[cfg(feature = "master")] -use rustc_attr_data_structures::InlineAttr; -use rustc_attr_data_structures::InstructionSetAttr; +use rustc_hir::attrs::InlineAttr; +use rustc_hir::attrs::InstructionSetAttr; #[cfg(feature = "master")] use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; #[cfg(feature = "master")] diff --git a/src/callee.rs b/src/callee.rs index e7ca95af594c6..8487a85bd035d 100644 --- a/src/callee.rs +++ b/src/callee.rs @@ -106,7 +106,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>) // This is a monomorphization of a generic function. 
if !(cx.tcx.sess.opts.share_generics() || tcx.codegen_instance_attrs(instance.def).inline - == rustc_attr_data_structures::InlineAttr::Never) + == rustc_hir::attrs::InlineAttr::Never) { // When not sharing generics, all instances are in the same // crate and have hidden visibility. diff --git a/src/lib.rs b/src/lib.rs index a312068250073..613315f77a6b3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -35,7 +35,6 @@ extern crate tracing; extern crate rustc_abi; extern crate rustc_apfloat; extern crate rustc_ast; -extern crate rustc_attr_data_structures; extern crate rustc_codegen_ssa; extern crate rustc_data_structures; extern crate rustc_errors; From 8a2a9db29eca4468eb1ccf58183b01fe61d00120 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Fri, 1 Aug 2025 10:40:19 -0400 Subject: [PATCH 062/118] Fix LTO errors by not adding AlwaysInline to __rust_alloc_error_handler_should_panic_v2 --- src/allocator.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/allocator.rs b/src/allocator.rs index 66258390d9092..2a95a7368aac6 100644 --- a/src/allocator.rs +++ b/src/allocator.rs @@ -104,7 +104,8 @@ fn create_const_value_function( tcx.sess.default_visibility(), ))); - func.add_attribute(FnAttribute::AlwaysInline); + // FIXME(antoyo): cg_llvm sets AlwaysInline, but AlwaysInline is different in GCC and using + // it here will causes linking errors when using LTO. func.add_attribute(FnAttribute::Inline); } From 071606b2a385b734fddf27d67a60484dc7df1797 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Fri, 1 Aug 2025 11:01:59 -0400 Subject: [PATCH 063/118] Add failing LTO test --- tests/failing-lto-tests.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/failing-lto-tests.txt b/tests/failing-lto-tests.txt index b1ae1e91078be..bf0633f732000 100644 --- a/tests/failing-lto-tests.txt +++ b/tests/failing-lto-tests.txt @@ -30,3 +30,4 @@ tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs tests/ui/macros/stringify.rs tests/ui/rfcs/rfc-1937-termination-trait/termination-trait-in-test.rs tests/ui/binding/fn-arg-incomplete-pattern-drop-order.rs +tests/ui/lto/debuginfo-lto-alloc.rs From bc5c2229d01cd06bfd91476636409d619bd5a807 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Fri, 1 Aug 2025 11:29:05 -0400 Subject: [PATCH 064/118] Fix issues in count_leading_zeroes --- src/intrinsic/mod.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/intrinsic/mod.rs b/src/intrinsic/mod.rs index 21b650bdecd70..57bdbad5e5368 100644 --- a/src/intrinsic/mod.rs +++ b/src/intrinsic/mod.rs @@ -889,10 +889,17 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { // TODO(antoyo): use width? let arg_type = arg.get_type(); let result_type = self.u32_type; + let arg = if arg_type.is_signed(self.cx) { + let new_type = arg_type.to_unsigned(self.cx); + self.gcc_int_cast(arg, new_type) + } else { + arg + }; + let arg_type = arg.get_type(); let count_leading_zeroes = // TODO(antoyo): write a new function Type::is_compatible_with(&Type) and use it here // instead of using is_uint(). 
- if arg_type.is_uint(self.cx) { + if arg_type.is_uchar(self.cx) || arg_type.is_ushort(self.cx) || arg_type.is_uint(self.cx) { "__builtin_clz" } else if arg_type.is_ulong(self.cx) { From 053f68151b8e6fb079b6a9254699d5a46220e52f Mon Sep 17 00:00:00 2001 From: Ifeanyi Orizu Date: Thu, 31 Jul 2025 22:38:49 -0500 Subject: [PATCH 065/118] Update documentation for overrideCommand config options --- .../rust-analyzer/crates/rust-analyzer/src/config.rs | 11 +++++++++-- .../docs/book/src/configuration_generated.md | 11 +++++++++-- src/tools/rust-analyzer/editors/code/package.json | 6 +++--- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 70d04485ca08c..1a00295b9ac18 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -726,7 +726,9 @@ config_data! { /// ```bash /// cargo check --quiet --workspace --message-format=json --all-targets --keep-going /// ``` - /// . + /// + /// Note: The option must be specified as an array of command line arguments, with + /// the first argument being the name of the command to run. cargo_buildScripts_overrideCommand: Option> = None, /// Rerun proc-macros building/build-scripts running when proc-macro /// or build-script sources change and are saved. @@ -840,7 +842,9 @@ config_data! { /// ```bash /// cargo check --workspace --message-format=json --all-targets /// ``` - /// . + /// + /// Note: The option must be specified as an array of command line arguments, with + /// the first argument being the name of the command to run. check_overrideCommand | checkOnSave_overrideCommand: Option> = None, /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. /// @@ -890,6 +894,9 @@ config_data! { /// not that of `cargo fmt`. The file contents will be passed on the /// standard input and the formatted result will be read from the /// standard output. + /// + /// Note: The option must be specified as an array of command line arguments, with + /// the first argument being the name of the command to run. rustfmt_overrideCommand: Option> = None, /// Enables the use of rustfmt's unstable range formatting command for the /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md index 05299f1d017ef..99a30d8f62138 100644 --- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md +++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md @@ -104,7 +104,9 @@ targets and features, with the following base command line: ```bash cargo check --quiet --workspace --message-format=json --all-targets --keep-going ``` -. + +Note: The option must be specified as an array of command line arguments, with +the first argument being the name of the command to run. ## rust-analyzer.cargo.buildScripts.rebuildOnSave {#cargo.buildScripts.rebuildOnSave} @@ -331,7 +333,9 @@ An example command would be: ```bash cargo check --workspace --message-format=json --all-targets ``` -. + +Note: The option must be specified as an array of command line arguments, with +the first argument being the name of the command to run. ## rust-analyzer.check.targets {#check.targets} @@ -1343,6 +1347,9 @@ not that of `cargo fmt`. 
The file contents will be passed on the standard input and the formatted result will be read from the standard output. +Note: The option must be specified as an array of command line arguments, with +the first argument being the name of the command to run. + ## rust-analyzer.rustfmt.rangeFormatting.enable {#rustfmt.rangeFormatting.enable} diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 8953a30dacb2c..470db244f14bd 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -887,7 +887,7 @@ "title": "Cargo", "properties": { "rust-analyzer.cargo.buildScripts.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", "default": null, "type": [ "null", @@ -1207,7 +1207,7 @@ "title": "Check", "properties": { "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. 
This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", "default": null, "type": [ "null", @@ -2808,7 +2808,7 @@ "title": "Rustfmt", "properties": { "rust-analyzer.rustfmt.overrideCommand": { - "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.", + "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.\n\nNote: The option must be specified as an array of command line arguments, with\nthe first argument being the name of the command to run.", "default": null, "type": [ "null", From 7d167260ca00ab7778d3198d9b34fbb3fd95d1e4 Mon Sep 17 00:00:00 2001 From: Ifeanyi Orizu Date: Fri, 1 Aug 2025 00:33:01 -0500 Subject: [PATCH 066/118] Fix more docs --- .../docs/book/src/contributing/style.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/style.md b/src/tools/rust-analyzer/docs/book/src/contributing/style.md index 5654e37753a57..746f3eb132117 100644 --- a/src/tools/rust-analyzer/docs/book/src/contributing/style.md +++ b/src/tools/rust-analyzer/docs/book/src/contributing/style.md @@ -49,8 +49,8 @@ In this case, we'll probably ask you to split API changes into a separate PR. Changes of the third group should be pretty rare, so we don't specify any specific process for them. That said, adding an innocent-looking `pub use` is a very simple way to break encapsulation, keep an eye on it! 
-Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate
-https://www.tedinski.com/2018/02/06/system-boundaries.html
+Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate [this post](https://www.tedinski.com/2018/02/06/system-boundaries.html).
+
 
 ## Crates.io Dependencies
 
@@ -231,7 +231,7 @@ fn is_string_literal(s: &str) -> bool {
 }
 ```
 
-In the "Not as good" version, the precondition that `1` is a valid char boundary is checked in `is_string_literal` and used in `foo`.
+In the "Bad" version, the precondition that `1` and `s.len() - 1` are valid string literal boundaries is checked in `is_string_literal` but used in `main`.
 In the "Good" version, the precondition check and usage are checked in the same block, and then encoded in the types.
 
 **Rationale:** non-local code properties degrade under change.
@@ -271,6 +271,8 @@ fn f() {
 }
 ```
 
+See also [this post](https://matklad.github.io/2023/11/15/push-ifs-up-and-fors-down.html)
+
 ## Assertions
 
 Assert liberally.
@@ -608,7 +610,7 @@ Avoid making a lot of code type parametric, *especially* on the boundaries betwe
 
 ```rust
 // GOOD
-fn frobnicate(f: impl FnMut()) {
+fn frobnicate(mut f: impl FnMut()) {
     frobnicate_impl(&mut f)
 }
 fn frobnicate_impl(f: &mut dyn FnMut()) {
@@ -616,7 +618,7 @@ fn frobnicate_impl(f: &mut dyn FnMut()) {
 }
 
 // BAD
-fn frobnicate(f: impl FnMut()) {
+fn frobnicate(mut f: impl FnMut()) {
     // lots of code
 }
 ```
@@ -975,7 +977,7 @@ Don't use the `ref` keyword.
 
 **Rationale:** consistency & simplicity.
 `ref` was required before [match ergonomics](https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md).
 Today, it is redundant.
-Between `ref` and mach ergonomics, the latter is more ergonomic in most cases, and is simpler (does not require a keyword).
+Between `ref` and match ergonomics, the latter is more ergonomic in most cases, and is simpler (does not require a keyword).
 
 ## Empty Match Arms
 
From f516d4c9e341847d8f452adef68fe99cfdcfccc0 Mon Sep 17 00:00:00 2001
From: Mingwei Samuel
Date: Fri, 25 Jul 2025 13:21:50 -0700
Subject: [PATCH 067/118] rustdoc font links only emit `crossorigin` when needed

The `crossorigin` attribute may cause issues when the href is not
actually across origins. Specifically, the tag causes the browser to
send a preflight OPTIONS request to the href even if it is same-origin.

Some temperamental servers may reject all CORS preflight requests even if
they're actually same-origin, which causes a CORS error and prevents
the fonts from loading, even later on.

This commit fixes that problem by not emitting `crossorigin` if the url
looks like a domain-relative url.
Co-authored-by: Guillaume Gomez --- src/librustdoc/html/layout.rs | 29 +++++++++++++++++++++++++ src/librustdoc/html/layout/tests.rs | 24 ++++++++++++++++++++ src/librustdoc/html/templates/page.html | 2 +- 3 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 src/librustdoc/html/layout/tests.rs diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs index 1f92c521d46e5..2782e8e0058ca 100644 --- a/src/librustdoc/html/layout.rs +++ b/src/librustdoc/html/layout.rs @@ -8,6 +8,9 @@ use super::static_files::{STATIC_FILES, StaticFiles}; use crate::externalfiles::ExternalHtml; use crate::html::render::{StylePath, ensure_trailing_slash}; +#[cfg(test)] +mod tests; + pub(crate) struct Layout { pub(crate) logo: String, pub(crate) favicon: String, @@ -68,6 +71,13 @@ struct PageLayout<'a> { display_krate_version_extra: &'a str, } +impl PageLayout<'_> { + /// See [`may_remove_crossorigin`]. + fn static_root_path_may_remove_crossorigin(&self) -> bool { + may_remove_crossorigin(&self.static_root_path) + } +} + pub(crate) use crate::html::render::sidebar::filters; pub(crate) fn render( @@ -134,3 +144,22 @@ pub(crate) fn redirect(url: &str) -> String { "##, ) } + +/// Conservatively determines if `href` is relative to the current origin, +/// so that `crossorigin` may be safely removed from `` elements. +pub(crate) fn may_remove_crossorigin(href: &str) -> bool { + // Reject scheme-relative URLs (`//example.com/`). + if href.starts_with("//") { + return false; + } + // URL is interpreted as having a scheme iff: it starts with an ascii alpha, and only + // contains ascii alphanumeric or `+` `-` `.` up to the `:`. + // https://url.spec.whatwg.org/#url-parsing + let has_scheme = href.split_once(':').is_some_and(|(scheme, _rest)| { + let mut chars = scheme.chars(); + chars.next().is_some_and(|c| c.is_ascii_alphabetic()) + && chars.all(|c| c.is_ascii_alphanumeric() || c == '+' || c == '-' || c == '.') + }); + // Reject anything with a scheme (`http:`, etc.). 
+ !has_scheme +} diff --git a/src/librustdoc/html/layout/tests.rs b/src/librustdoc/html/layout/tests.rs new file mode 100644 index 0000000000000..d4a19ee9abfdf --- /dev/null +++ b/src/librustdoc/html/layout/tests.rs @@ -0,0 +1,24 @@ +#[test] +fn test_may_remove_crossorigin() { + use super::may_remove_crossorigin; + + assert!(may_remove_crossorigin("font.woff2")); + assert!(may_remove_crossorigin("/font.woff2")); + assert!(may_remove_crossorigin("./font.woff2")); + assert!(may_remove_crossorigin(":D/font.woff2")); + assert!(may_remove_crossorigin("../font.woff2")); + + assert!(!may_remove_crossorigin("//example.com/static.files")); + assert!(!may_remove_crossorigin("http://example.com/static.files")); + assert!(!may_remove_crossorigin("https://example.com/static.files")); + assert!(!may_remove_crossorigin("https://example.com:8080/static.files")); + + assert!(!may_remove_crossorigin("ftp://example.com/static.files")); + assert!(!may_remove_crossorigin("blob:http://example.com/static.files")); + assert!(!may_remove_crossorigin("javascript:alert('Hello, world!')")); + assert!(!may_remove_crossorigin("//./C:")); + assert!(!may_remove_crossorigin("file:////C:")); + assert!(!may_remove_crossorigin("file:///./C:")); + assert!(!may_remove_crossorigin("data:,Hello%2C%20World%21")); + assert!(!may_remove_crossorigin("hi...:hello")); +} diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html index 7af99e7097c37..398436e3fe13b 100644 --- a/src/librustdoc/html/templates/page.html +++ b/src/librustdoc/html/templates/page.html @@ -7,7 +7,7 @@ {# #} {{page.title}} {# #} {# #} {# #} From 5b03d0711ad6a2af1199fd28488a5c3ed813b130 Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Sat, 2 Aug 2025 15:55:56 +0800 Subject: [PATCH 068/118] Pull out unexpected extension check into own function --- src/tools/tidy/src/ui_tests.rs | 88 ++++++++++++++++++---------------- 1 file changed, 46 insertions(+), 42 deletions(-) diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index 4d195b3952e27..91b89d4114972 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -7,41 +7,6 @@ use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; -const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[ - "rs", // test source files - "stderr", // expected stderr file, corresponds to a rs file - "svg", // expected svg file, corresponds to a rs file, equivalent to stderr - "stdout", // expected stdout file, corresponds to a rs file - "fixed", // expected source file after applying fixes - "md", // test directory descriptions - "ftl", // translation tests -]; - -const EXTENSION_EXCEPTION_PATHS: &[&str] = &[ - "tests/ui/asm/named-asm-labels.s", // loading an external asm file to test named labels lint - "tests/ui/codegen/mismatched-data-layout.json", // testing mismatched data layout w/ custom targets - "tests/ui/check-cfg/my-awesome-platform.json", // testing custom targets with cfgs - "tests/ui/argfile/commandline-argfile-badutf8.args", // passing args via a file - "tests/ui/argfile/commandline-argfile.args", // passing args via a file - "tests/ui/crate-loading/auxiliary/libfoo.rlib", // testing loading a manually created rlib - "tests/ui/include-macros/data.bin", // testing including data with the include macros - "tests/ui/include-macros/file.txt", // testing including data with the include macros - "tests/ui/macros/macro-expanded-include/file.txt", // testing including data with the include macros - "tests/ui/macros/not-utf8.bin", // testing 
including data with the include macros - "tests/ui/macros/syntax-extension-source-utils-files/includeme.fragment", // more include - "tests/ui/proc-macro/auxiliary/included-file.txt", // more include - "tests/ui/unpretty/auxiliary/data.txt", // more include - "tests/ui/invalid/foo.natvis.xml", // sample debugger visualizer - "tests/ui/sanitizer/dataflow-abilist.txt", // dataflow sanitizer ABI list file - "tests/ui/shell-argfiles/shell-argfiles.args", // passing args via a file - "tests/ui/shell-argfiles/shell-argfiles-badquotes.args", // passing args via a file - "tests/ui/shell-argfiles/shell-argfiles-via-argfile-shell.args", // passing args via a file - "tests/ui/shell-argfiles/shell-argfiles-via-argfile.args", // passing args via a file - "tests/ui/std/windows-bat-args1.bat", // tests escaping arguments through batch files - "tests/ui/std/windows-bat-args2.bat", // tests escaping arguments through batch files - "tests/ui/std/windows-bat-args3.bat", // tests escaping arguments through batch files -]; - pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { let issues_txt_header = r#"============================================================ ⚠️⚠️⚠️NOTHING SHOULD EVER BE ADDED TO THIS LIST⚠️⚠️⚠️ @@ -82,13 +47,7 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { crate::walk::walk_no_read(&paths, |_, _| false, &mut |entry| { let file_path = entry.path(); if let Some(ext) = file_path.extension().and_then(OsStr::to_str) { - // files that are neither an expected extension or an exception should not exist - // they're probably typos or not meant to exist - if !(EXPECTED_TEST_FILE_EXTENSIONS.contains(&ext) - || EXTENSION_EXCEPTION_PATHS.iter().any(|path| file_path.ends_with(path))) - { - tidy_error!(bad, "file {} has unexpected extension {}", file_path.display(), ext); - } + check_unexpected_extension(bad, file_path, ext); // NB: We do not use file_stem() as some file names have multiple `.`s and we // must strip all of them. 
@@ -171,3 +130,48 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { } } } + +fn check_unexpected_extension(bad: &mut bool, file_path: &Path, ext: &str) { + const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[ + "rs", // test source files + "stderr", // expected stderr file, corresponds to a rs file + "svg", // expected svg file, corresponds to a rs file, equivalent to stderr + "stdout", // expected stdout file, corresponds to a rs file + "fixed", // expected source file after applying fixes + "md", // test directory descriptions + "ftl", // translation tests + ]; + + const EXTENSION_EXCEPTION_PATHS: &[&str] = &[ + "tests/ui/asm/named-asm-labels.s", // loading an external asm file to test named labels lint + "tests/ui/codegen/mismatched-data-layout.json", // testing mismatched data layout w/ custom targets + "tests/ui/check-cfg/my-awesome-platform.json", // testing custom targets with cfgs + "tests/ui/argfile/commandline-argfile-badutf8.args", // passing args via a file + "tests/ui/argfile/commandline-argfile.args", // passing args via a file + "tests/ui/crate-loading/auxiliary/libfoo.rlib", // testing loading a manually created rlib + "tests/ui/include-macros/data.bin", // testing including data with the include macros + "tests/ui/include-macros/file.txt", // testing including data with the include macros + "tests/ui/macros/macro-expanded-include/file.txt", // testing including data with the include macros + "tests/ui/macros/not-utf8.bin", // testing including data with the include macros + "tests/ui/macros/syntax-extension-source-utils-files/includeme.fragment", // more include + "tests/ui/proc-macro/auxiliary/included-file.txt", // more include + "tests/ui/unpretty/auxiliary/data.txt", // more include + "tests/ui/invalid/foo.natvis.xml", // sample debugger visualizer + "tests/ui/sanitizer/dataflow-abilist.txt", // dataflow sanitizer ABI list file + "tests/ui/shell-argfiles/shell-argfiles.args", // passing args via a file + "tests/ui/shell-argfiles/shell-argfiles-badquotes.args", // passing args via a file + "tests/ui/shell-argfiles/shell-argfiles-via-argfile-shell.args", // passing args via a file + "tests/ui/shell-argfiles/shell-argfiles-via-argfile.args", // passing args via a file + "tests/ui/std/windows-bat-args1.bat", // tests escaping arguments through batch files + "tests/ui/std/windows-bat-args2.bat", // tests escaping arguments through batch files + "tests/ui/std/windows-bat-args3.bat", // tests escaping arguments through batch files + ]; + + // files that are neither an expected extension or an exception should not exist + // they're probably typos or not meant to exist + if !(EXPECTED_TEST_FILE_EXTENSIONS.contains(&ext) + || EXTENSION_EXCEPTION_PATHS.iter().any(|path| file_path.ends_with(path))) + { + tidy_error!(bad, "file {} has unexpected extension {}", file_path.display(), ext); + } +} From c10dc999f004aa04d29652f6fa9dc9535cb10899 Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Sat, 2 Aug 2025 15:58:53 +0800 Subject: [PATCH 069/118] Pull out stray/empty output snapshot checks into own functions --- src/tools/tidy/src/ui_tests.rs | 51 +++++++++++++++++++--------------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index 91b89d4114972..ee26e81c97f64 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -54,28 +54,8 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { let testname = 
file_path.file_name().unwrap().to_str().unwrap().split_once('.').unwrap().0; if ext == "stderr" || ext == "stdout" || ext == "fixed" { - // Test output filenames have one of the formats: - // ``` - // $testname.stderr - // $testname.$mode.stderr - // $testname.$revision.stderr - // $testname.$revision.$mode.stderr - // ``` - // - // For now, just make sure that there is a corresponding - // `$testname.rs` file. - - if !file_path.with_file_name(testname).with_extension("rs").exists() - && !testname.contains("ignore-tidy") - { - tidy_error!(bad, "Stray file with UI testing output: {:?}", file_path); - } - - if let Ok(metadata) = fs::metadata(file_path) - && metadata.len() == 0 - { - tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path); - } + check_stray_output_snapshot(bad, file_path, testname); + check_empty_output_snapshot(bad, file_path); } if ext == "rs" @@ -175,3 +155,30 @@ fn check_unexpected_extension(bad: &mut bool, file_path: &Path, ext: &str) { tidy_error!(bad, "file {} has unexpected extension {}", file_path.display(), ext); } } + +fn check_stray_output_snapshot(bad: &mut bool, file_path: &Path, testname: &str) { + // Test output filenames have one of the formats: + // ``` + // $testname.stderr + // $testname.$mode.stderr + // $testname.$revision.stderr + // $testname.$revision.$mode.stderr + // ``` + // + // For now, just make sure that there is a corresponding + // `$testname.rs` file. + + if !file_path.with_file_name(testname).with_extension("rs").exists() + && !testname.contains("ignore-tidy") + { + tidy_error!(bad, "Stray file with UI testing output: {:?}", file_path); + } +} + +fn check_empty_output_snapshot(bad: &mut bool, file_path: &Path) { + if let Ok(metadata) = fs::metadata(file_path) + && metadata.len() == 0 + { + tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path); + } +} From a71428825a3322d2662efdc4299f9cfac3e3f5e5 Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Sat, 2 Aug 2025 16:09:10 +0800 Subject: [PATCH 070/118] Pull out non-descriptive test name check to own function --- src/tools/tidy/src/ui_tests.rs | 60 ++++++++++++++++++++++------------ 1 file changed, 39 insertions(+), 21 deletions(-) diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index ee26e81c97f64..98a6b466ae914 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -58,27 +58,14 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { check_empty_output_snapshot(bad, file_path); } - if ext == "rs" - && let Some(test_name) = static_regex!(r"^issues?[-_]?(\d{3,})").captures(testname) - { - // these paths are always relative to the passed `path` and always UTF8 - let stripped_path = file_path - .strip_prefix(path) - .unwrap() - .to_str() - .unwrap() - .replace(std::path::MAIN_SEPARATOR_STR, "/"); - - if !remaining_issue_names.remove(stripped_path.as_str()) - && !stripped_path.starts_with("ui/issues/") - { - tidy_error!( - bad, - "file `tests/{stripped_path}` must begin with a descriptive name, consider `{{reason}}-issue-{issue_n}.rs`", - issue_n = &test_name[1], - ); - } - } + deny_new_nondescriptive_test_names( + bad, + path, + &mut remaining_issue_names, + file_path, + testname, + ext, + ); } }); @@ -182,3 +169,34 @@ fn check_empty_output_snapshot(bad: &mut bool, file_path: &Path) { tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path); } } + +fn deny_new_nondescriptive_test_names( + bad: &mut bool, + path: &Path, + remaining_issue_names: &mut BTreeSet<&str>, + file_path: &Path, + 
testname: &str, + ext: &str, +) { + if ext == "rs" + && let Some(test_name) = static_regex!(r"^issues?[-_]?(\d{3,})").captures(testname) + { + // these paths are always relative to the passed `path` and always UTF8 + let stripped_path = file_path + .strip_prefix(path) + .unwrap() + .to_str() + .unwrap() + .replace(std::path::MAIN_SEPARATOR_STR, "/"); + + if !remaining_issue_names.remove(stripped_path.as_str()) + && !stripped_path.starts_with("ui/issues/") + { + tidy_error!( + bad, + "file `tests/{stripped_path}` must begin with a descriptive name, consider `{{reason}}-issue-{issue_n}.rs`", + issue_n = &test_name[1], + ); + } + } +} From a97d0aabc8bbaeff1aff88df67bc99e8c778ba06 Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Sat, 2 Aug 2025 16:12:56 +0800 Subject: [PATCH 071/118] Make `issues_txt_header` a const --- src/tools/tidy/src/ui_tests.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index 98a6b466ae914..6c83764cc1943 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -7,12 +7,12 @@ use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; -pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { - let issues_txt_header = r#"============================================================ +const ISSUES_TXT_HEADER: &str = r#"============================================================ ⚠️⚠️⚠️NOTHING SHOULD EVER BE ADDED TO THIS LIST⚠️⚠️⚠️ ============================================================ "#; +pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { let path = &root_path.join("tests"); // the list of files in ui tests that are allowed to start with `issue-XXXX` @@ -20,7 +20,7 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { let mut prev_line = ""; let mut is_sorted = true; let allowed_issue_names: BTreeSet<_> = include_str!("issues.txt") - .strip_prefix(issues_txt_header) + .strip_prefix(ISSUES_TXT_HEADER) .unwrap() .lines() .inspect(|&line| { @@ -78,7 +78,7 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { // so we don't bork things on panic or a contributor using Ctrl+C let blessed_issues_path = tidy_src.join("issues_blessed.txt"); let mut blessed_issues_txt = fs::File::create(&blessed_issues_path).unwrap(); - blessed_issues_txt.write_all(issues_txt_header.as_bytes()).unwrap(); + blessed_issues_txt.write_all(ISSUES_TXT_HEADER.as_bytes()).unwrap(); // If we changed paths to use the OS separator, reassert Unix chauvinism for blessing. 
for filename in allowed_issue_names.difference(&remaining_issue_names) { writeln!(blessed_issues_txt, "{filename}").unwrap(); From fa31c7d49e7456b57bc32de118e72e0d7045ef6e Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Sat, 2 Aug 2025 16:16:41 +0800 Subject: [PATCH 072/118] Pull out recursive ui test check into its own function --- src/tools/tidy/src/ui_tests.rs | 65 +++++++++++++++++++--------------- 1 file changed, 37 insertions(+), 28 deletions(-) diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index 6c83764cc1943..b968ea5f2d89a 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -40,34 +40,7 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { ); } - let mut remaining_issue_names: BTreeSet<&str> = allowed_issue_names.clone(); - - let (ui, ui_fulldeps) = (path.join("ui"), path.join("ui-fulldeps")); - let paths = [ui.as_path(), ui_fulldeps.as_path()]; - crate::walk::walk_no_read(&paths, |_, _| false, &mut |entry| { - let file_path = entry.path(); - if let Some(ext) = file_path.extension().and_then(OsStr::to_str) { - check_unexpected_extension(bad, file_path, ext); - - // NB: We do not use file_stem() as some file names have multiple `.`s and we - // must strip all of them. - let testname = - file_path.file_name().unwrap().to_str().unwrap().split_once('.').unwrap().0; - if ext == "stderr" || ext == "stdout" || ext == "fixed" { - check_stray_output_snapshot(bad, file_path, testname); - check_empty_output_snapshot(bad, file_path); - } - - deny_new_nondescriptive_test_names( - bad, - path, - &mut remaining_issue_names, - file_path, - testname, - ext, - ); - } - }); + let remaining_issue_names = recursively_check_ui_tests(bad, path, &allowed_issue_names); // if there are any file names remaining, they were moved on the fs. // our data must remain up to date, so it must be removed from issues.txt @@ -98,6 +71,42 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { } } +fn recursively_check_ui_tests<'issues>( + bad: &mut bool, + path: &Path, + allowed_issue_names: &'issues BTreeSet<&'issues str>, +) -> BTreeSet<&'issues str> { + let mut remaining_issue_names: BTreeSet<&str> = allowed_issue_names.clone(); + + let (ui, ui_fulldeps) = (path.join("ui"), path.join("ui-fulldeps")); + let paths = [ui.as_path(), ui_fulldeps.as_path()]; + crate::walk::walk_no_read(&paths, |_, _| false, &mut |entry| { + let file_path = entry.path(); + if let Some(ext) = file_path.extension().and_then(OsStr::to_str) { + check_unexpected_extension(bad, file_path, ext); + + // NB: We do not use file_stem() as some file names have multiple `.`s and we + // must strip all of them. + let testname = + file_path.file_name().unwrap().to_str().unwrap().split_once('.').unwrap().0; + if ext == "stderr" || ext == "stdout" || ext == "fixed" { + check_stray_output_snapshot(bad, file_path, testname); + check_empty_output_snapshot(bad, file_path); + } + + deny_new_nondescriptive_test_names( + bad, + path, + &mut remaining_issue_names, + file_path, + testname, + ext, + ); + } + }); + remaining_issue_names +} + fn check_unexpected_extension(bad: &mut bool, file_path: &Path, ext: &str) { const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[ "rs", // test source files From 0b1547e9c052b29d246a8e5cb3d6408407e88dab Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Sat, 2 Aug 2025 16:54:26 +0800 Subject: [PATCH 073/118] Reject adding new UI tests directly under `tests/ui/` As we want future UI tests to be added under a more meaningful subdirectory instead. 
--- src/tools/tidy/src/ui_tests.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index b968ea5f2d89a..5bf966b658c63 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -40,6 +40,8 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { ); } + deny_new_top_level_ui_tests(bad, &path.join("ui")); + let remaining_issue_names = recursively_check_ui_tests(bad, path, &allowed_issue_names); // if there are any file names remaining, they were moved on the fs. @@ -71,6 +73,34 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { } } +fn deny_new_top_level_ui_tests(bad: &mut bool, tests_path: &Path) { + // See where we propose banning adding + // new ui tests *directly* under `tests/ui/`. For more context, see: + // + // - + // - + + let top_level_ui_tests = walkdir::WalkDir::new(tests_path) + .min_depth(1) + .max_depth(1) + .follow_links(false) + .same_file_system(true) + .into_iter() + .flatten() + .filter(|e| { + let file_name = e.file_name(); + file_name != ".gitattributes" && file_name != "README.md" + }) + .filter(|e| !e.file_type().is_dir()); + for entry in top_level_ui_tests { + tidy_error!( + bad, + "ui tests should be added under meaningful subdirectories: `{}`", + entry.path().display() + ) + } +} + fn recursively_check_ui_tests<'issues>( bad: &mut bool, path: &Path, From 81c4086a03dcd5f114ed9d739622805cf77cb738 Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Sat, 2 Aug 2025 19:13:07 +0900 Subject: [PATCH 074/118] Migrate `convert_from_to_tryfrom` assist to use `SyntaxEditor` --- .../src/handlers/convert_from_to_tryfrom.rs | 42 ++++++++++--------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs index a4742bc7bded9..f1cc3d90b9c56 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs @@ -1,9 +1,7 @@ use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; -use itertools::Itertools; -use syntax::{ - ast::{self, AstNode, HasGenericArgs, HasName, make}, - ted, -}; +use syntax::ast::edit::IndentLevel; +use syntax::ast::{self, AstNode, HasGenericArgs, HasName, make}; +use syntax::syntax_editor::{Element, Position}; use crate::{AssistContext, AssistId, Assists}; @@ -49,6 +47,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> }; let associated_items = impl_.assoc_item_list()?; + let associated_l_curly = associated_items.l_curly_token()?; let from_fn = associated_items.assoc_items().find_map(|item| { if let ast::AssocItem::Fn(f) = item && f.name()?.text() == "from" @@ -75,30 +74,25 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> "Convert From to TryFrom", impl_.syntax().text_range(), |builder| { - let trait_ty = builder.make_mut(trait_ty); - let from_fn_return_type = builder.make_mut(from_fn_return_type); - let from_fn_name = builder.make_mut(from_fn_name); - let tail_expr = builder.make_mut(tail_expr); - let return_exprs = return_exprs.map(|r| builder.make_mut(r)).collect_vec(); - let associated_items = builder.make_mut(associated_items); - - ted::replace( + let mut editor = builder.make_editor(impl_.syntax()); + editor.replace( 
trait_ty.syntax(), make::ty(&format!("TryFrom<{from_type}>")).syntax().clone_for_update(), ); - ted::replace( + editor.replace( from_fn_return_type.syntax(), make::ty("Result").syntax().clone_for_update(), ); - ted::replace(from_fn_name.syntax(), make::name("try_from").syntax().clone_for_update()); - ted::replace( + editor + .replace(from_fn_name.syntax(), make::name("try_from").syntax().clone_for_update()); + editor.replace( tail_expr.syntax(), wrap_ok(tail_expr.clone()).syntax().clone_for_update(), ); for r in return_exprs { let t = r.expr().unwrap_or_else(make::ext::expr_unit); - ted::replace(t.syntax(), wrap_ok(t.clone()).syntax().clone_for_update()); + editor.replace(t.syntax(), wrap_ok(t.clone()).syntax().clone_for_update()); } let error_type = ast::AssocItem::TypeAlias(make::ty_alias( @@ -114,10 +108,20 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> && let ast::AssocItem::TypeAlias(type_alias) = &error_type && let Some(ty) = type_alias.ty() { - builder.add_placeholder_snippet(cap, ty); + let placeholder = builder.make_placeholder_snippet(cap); + editor.add_annotation(ty.syntax(), placeholder); } - associated_items.add_item_at_start(error_type); + let indent = IndentLevel::from_token(&associated_l_curly) + 1; + editor.insert_all( + Position::after(associated_l_curly), + vec![ + make::tokens::whitespace(&format!("\n{indent}")).syntax_element(), + error_type.syntax().syntax_element(), + make::tokens::whitespace("\n").syntax_element(), + ], + ); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } From 5ebb0dd7da0c1c0fe6c6505fc1387d8805fd8a6d Mon Sep 17 00:00:00 2001 From: Kivooeo Date: Sat, 2 Aug 2025 19:01:07 +0500 Subject: [PATCH 075/118] update doc --- src/doc/rustc-dev-guide/src/appendix/humorust.md | 2 +- src/doc/rustc-dev-guide/src/tests/directives.md | 2 +- src/doc/rustc-dev-guide/src/tests/ui.md | 8 +++----- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/doc/rustc-dev-guide/src/appendix/humorust.md b/src/doc/rustc-dev-guide/src/appendix/humorust.md index 6df3b212aa77c..8681512ed56a9 100644 --- a/src/doc/rustc-dev-guide/src/appendix/humorust.md +++ b/src/doc/rustc-dev-guide/src/appendix/humorust.md @@ -3,7 +3,7 @@ What's a project without a sense of humor? And frankly some of these are enlightening? -- [Weird exprs test](https://github.com/rust-lang/rust/blob/master/tests/ui/weird-exprs.rs) +- [Weird exprs test](https://github.com/rust-lang/rust/blob/master/tests/ui/expr/weird-exprs.rs) - [Ferris Rap](https://fitzgen.com/2018/12/13/rust-raps.html) - [The Genesis of Generic Germination](https://github.com/rust-lang/rust/pull/53645#issue-210543221) - [The Bastion of the Turbofish test](https://github.com/rust-lang/rust/blob/79d8a0fcefa5134db2a94739b1d18daa01fc6e9f/src/test/ui/bastion-of-the-turbofish.rs) diff --git a/src/doc/rustc-dev-guide/src/tests/directives.md b/src/doc/rustc-dev-guide/src/tests/directives.md index 6fff021b0b12b..bda95e4dfbcce 100644 --- a/src/doc/rustc-dev-guide/src/tests/directives.md +++ b/src/doc/rustc-dev-guide/src/tests/directives.md @@ -359,7 +359,7 @@ described below: - Example: `x86_64-unknown-linux-gnu` See -[`tests/ui/commandline-argfile.rs`](https://github.com/rust-lang/rust/blob/master/tests/ui/argfile/commandline-argfile.rs) +[`tests/ui/argfile/commandline-argfile.rs`](https://github.com/rust-lang/rust/blob/master/tests/ui/argfile/commandline-argfile.rs) for an example of a test that uses this substitution. 
[output normalization]: ui.md#normalization diff --git a/src/doc/rustc-dev-guide/src/tests/ui.md b/src/doc/rustc-dev-guide/src/tests/ui.md index 782f78d76148e..eecd72695d959 100644 --- a/src/doc/rustc-dev-guide/src/tests/ui.md +++ b/src/doc/rustc-dev-guide/src/tests/ui.md @@ -25,9 +25,9 @@ If you need to work with `#![no_std]` cross-compiling tests, consult the ## General structure of a test -A test consists of a Rust source file located anywhere in the `tests/ui` -directory, but they should be placed in a suitable sub-directory. For example, -[`tests/ui/hello.rs`] is a basic hello-world test. +A test consists of a Rust source file located in the `tests/ui` directory. +**Tests must be placed in the appropriate subdirectory** based on their purpose +and testing category - placing tests directly in `tests/ui` is not permitted. Compiletest will use `rustc` to compile the test, and compare the output against the expected output which is stored in a `.stdout` or `.stderr` file located @@ -46,8 +46,6 @@ pass/fail expectations](#controlling-passfail-expectations). By default, a test is built as an executable binary. If you need a different crate type, you can use the `#![crate_type]` attribute to set it as needed. -[`tests/ui/hello.rs`]: https://github.com/rust-lang/rust/blob/master/tests/ui/hello.rs - ## Output comparison UI tests store the expected output from the compiler in `.stderr` and `.stdout` From e3a6469ac09ad7086049fc9ce3b0439432f29cea Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Fri, 1 Aug 2025 12:28:23 -0400 Subject: [PATCH 076/118] Implement SIMD funnel shifts --- src/intrinsic/simd.rs | 78 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/src/intrinsic/simd.rs b/src/intrinsic/simd.rs index 350915a277e33..6748e1a412579 100644 --- a/src/intrinsic/simd.rs +++ b/src/intrinsic/simd.rs @@ -206,6 +206,28 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( ); } + #[cfg(feature = "master")] + if name == sym::simd_funnel_shl { + return Ok(simd_funnel_shift( + bx, + args[0].immediate(), + args[1].immediate(), + args[2].immediate(), + true, + )); + } + + #[cfg(feature = "master")] + if name == sym::simd_funnel_shr { + return Ok(simd_funnel_shift( + bx, + args[0].immediate(), + args[1].immediate(), + args[2].immediate(), + false, + )); + } + if name == sym::simd_bswap { return Ok(simd_bswap(bx, args[0].immediate())); } @@ -1434,3 +1456,59 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( unimplemented!("simd {}", name); } + +#[cfg(feature = "master")] +fn simd_funnel_shift<'a, 'gcc, 'tcx>( + bx: &mut Builder<'a, 'gcc, 'tcx>, + a: RValue<'gcc>, + b: RValue<'gcc>, + shift: RValue<'gcc>, + shift_left: bool, +) -> RValue<'gcc> { + let a_type = a.get_type(); + let vector_type = a_type.unqualified().dyncast_vector().expect("vector type"); + let num_units = vector_type.get_num_units(); + let elem_type = vector_type.get_element_type(); + + let (new_int_type, int_shift_val, int_mask) = if elem_type.is_compatible_with(bx.u8_type) { + (bx.u16_type, 8, u8::MAX as u64) + } else if elem_type.is_compatible_with(bx.u16_type) { + (bx.u32_type, 16, u16::MAX as u64) + } else if elem_type.is_compatible_with(bx.u32_type) { + (bx.u64_type, 32, u32::MAX as u64) + } else if elem_type.is_compatible_with(bx.u64_type) { + (bx.u128_type, 64, u64::MAX) + } else if elem_type.is_compatible_with(bx.i8_type) { + (bx.i16_type, 8, u8::MAX as u64) + } else if elem_type.is_compatible_with(bx.i16_type) { + (bx.i32_type, 16, u16::MAX as u64) + } else if 
elem_type.is_compatible_with(bx.i32_type) { + (bx.i64_type, 32, u32::MAX as u64) + } else if elem_type.is_compatible_with(bx.i64_type) { + (bx.i128_type, 64, u64::MAX) + } else { + unimplemented!("funnel shift on {:?}", elem_type); + }; + + let int_mask = bx.context.new_rvalue_from_long(new_int_type, int_mask as i64); + let int_shift_val = bx.context.new_rvalue_from_int(new_int_type, int_shift_val); + let mut elements = vec![]; + for i in 0..num_units { + let index = bx.context.new_rvalue_from_int(bx.int_type, i as i32); + let a_val = bx.context.new_vector_access(None, a, index).to_rvalue(); + let a_val = bx.context.new_cast(None, a_val, new_int_type); + let b_val = bx.context.new_vector_access(None, b, index).to_rvalue(); + let b_val = bx.context.new_cast(None, b_val, new_int_type); + let shift_val = bx.context.new_vector_access(None, shift, index).to_rvalue(); + let shift_val = bx.context.new_cast(None, shift_val, new_int_type); + let mut val = a_val << int_shift_val | b_val; + if shift_left { + val = (val << shift_val) >> int_shift_val; + } else { + val = (val >> shift_val) & int_mask; + } + let val = bx.context.new_cast(None, val, elem_type); + elements.push(val); + } + bx.context.new_rvalue_from_vector(None, a_type, &elements) +} From e314bfaad38f5958c82fac6af4f4f34dcb2dc94f Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Sat, 2 Aug 2025 19:13:07 +0900 Subject: [PATCH 077/118] Migrate `generate_delegate_methods` assist to use `SyntaxEditor` --- .../src/handlers/add_missing_impl_members.rs | 4 +- .../src/handlers/generate_delegate_methods.rs | 58 ++++++++++++------- .../ide-assists/src/handlers/generate_impl.rs | 1 - .../ide-assists/src/handlers/generate_new.rs | 4 +- .../replace_derive_with_manual_impl.rs | 6 +- .../crates/syntax/src/ast/make.rs | 4 +- 6 files changed, 41 insertions(+), 36 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 7f1e7ccb4487f..11201afb8a7f2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -183,13 +183,11 @@ fn add_missing_impl_members_inner( .clone() .into_iter() .chain(other_items.iter().cloned()) - .map(either::Either::Right) .collect::>(); let mut editor = edit.make_editor(impl_def.syntax()); if let Some(assoc_item_list) = impl_def.assoc_item_list() { - let items = new_assoc_items.into_iter().filter_map(either::Either::right).collect(); - assoc_item_list.add_items(&mut editor, items); + assoc_item_list.add_items(&mut editor, new_assoc_items); } else { let assoc_item_list = make::assoc_item_list(Some(new_assoc_items)).clone_for_update(); editor.insert_all( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 6063898076042..2c81e2883a34a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -2,9 +2,11 @@ use hir::{HasCrate, HasVisibility}; use ide_db::{FxHashSet, path_transform::PathTransform}; use syntax::{ ast::{ - self, AstNode, HasGenericParams, HasName, HasVisibility as _, edit_in_place::Indent, make, + self, AstNode, HasGenericParams, HasName, HasVisibility as _, + 
edit::{AstNodeEdit, IndentLevel}, + make, }, - ted, + syntax_editor::Position, }; use crate::{ @@ -165,54 +167,66 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' is_unsafe, is_gen, ) - .clone_for_update(); - - // Get the impl to update, or create one if we need to. - let impl_def = match impl_def { - Some(impl_def) => edit.make_mut(impl_def), + .indent(IndentLevel(1)); + let item = ast::AssocItem::Fn(f.clone()); + + let mut editor = edit.make_editor(strukt.syntax()); + let fn_: Option = match impl_def { + Some(impl_def) => match impl_def.assoc_item_list() { + Some(assoc_item_list) => { + let item = item.indent(IndentLevel::from_node(impl_def.syntax())); + assoc_item_list.add_items(&mut editor, vec![item.clone()]); + Some(item) + } + None => { + let assoc_item_list = make::assoc_item_list(Some(vec![item])); + editor.insert( + Position::last_child_of(impl_def.syntax()), + assoc_item_list.syntax(), + ); + assoc_item_list.assoc_items().next() + } + }, None => { let name = &strukt_name.to_string(); let ty_params = strukt.generic_param_list(); let ty_args = ty_params.as_ref().map(|it| it.to_generic_args()); let where_clause = strukt.where_clause(); + let assoc_item_list = make::assoc_item_list(Some(vec![item])); let impl_def = make::impl_( ty_params, ty_args, make::ty_path(make::ext::ident_path(name)), where_clause, - None, + Some(assoc_item_list), ) .clone_for_update(); // Fixup impl_def indentation let indent = strukt.indent_level(); - impl_def.reindent_to(indent); + let impl_def = impl_def.indent(indent); // Insert the impl block. let strukt = edit.make_mut(strukt.clone()); - ted::insert_all( - ted::Position::after(strukt.syntax()), + editor.insert_all( + Position::after(strukt.syntax()), vec![ make::tokens::whitespace(&format!("\n\n{indent}")).into(), impl_def.syntax().clone().into(), ], ); - - impl_def + impl_def.assoc_item_list().and_then(|list| list.assoc_items().next()) } }; - // Fixup function indentation. 
- // FIXME: Should really be handled by `AssocItemList::add_item` - f.reindent_to(impl_def.indent_level() + 1); - - let assoc_items = impl_def.get_or_create_assoc_item_list(); - assoc_items.add_item(f.clone().into()); - - if let Some(cap) = ctx.config.snippet_cap { - edit.add_tabstop_before(cap, f) + if let Some(cap) = ctx.config.snippet_cap + && let Some(fn_) = fn_ + { + let tabstop = edit.make_tabstop_before(cap); + editor.add_annotation(fn_.syntax(), tabstop); } + edit.add_file_edits(ctx.vfs_file_id(), editor); }, )?; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index fcb81d239ff3f..b38ee6f7dce8e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -201,7 +201,6 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> &impl_, &target_scope, ); - let assoc_items = assoc_items.into_iter().map(either::Either::Right).collect(); let assoc_item_list = make::assoc_item_list(Some(assoc_items)); make_impl_(Some(assoc_item_list)) }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 5bda1226cda36..351f134612f00 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -168,7 +168,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option ); fn_.syntax().clone() } else { - let items = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let items = vec![ast::AssocItem::Fn(fn_)]; let list = make::assoc_item_list(Some(items)); editor.insert(Position::after(impl_def.syntax()), list.syntax()); list.syntax().clone() @@ -176,7 +176,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option } else { // Generate a new impl to add the method to let indent_level = strukt.indent_level(); - let body = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let body = vec![ast::AssocItem::Fn(fn_)]; let list = make::assoc_item_list(Some(body)); let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list)); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 45bb6ce9129cb..175f261317058 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -221,11 +221,7 @@ fn impl_def_from_trait( } else { Some(first.clone()) }; - let items = first_item - .into_iter() - .chain(other.iter().cloned()) - .map(either::Either::Right) - .collect(); + let items = first_item.into_iter().chain(other.iter().cloned()).collect(); make::assoc_item_list(Some(items)) } else { make::assoc_item_list(None) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 2a7b51c3c2481..daeb79cf081dc 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -229,9 +229,7 @@ pub fn ty_fn_ptr>( } } -pub fn assoc_item_list( - body: Option>>, -) -> ast::AssocItemList { 
+pub fn assoc_item_list(body: Option>) -> ast::AssocItemList { let is_break_braces = body.is_some(); let body_newline = if is_break_braces { "\n".to_owned() } else { String::new() }; let body_indent = if is_break_braces { " ".to_owned() } else { String::new() }; From ac34f3db819f99fcc06a54396d6531862d6f2fb1 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 31 Jul 2025 23:33:38 +0300 Subject: [PATCH 078/118] When renaming a parameter to `self`, change callers to use method call syntax --- .../rust-analyzer/crates/base-db/src/lib.rs | 3 +- .../crates/hir-expand/src/builtin/fn_macro.rs | 2 +- .../crates/hir-expand/src/files.rs | 10 ++ .../crates/hir-ty/src/test_db.rs | 2 +- .../rust-analyzer/crates/ide-db/src/lib.rs | 2 +- .../rust-analyzer/crates/ide-db/src/search.rs | 17 +- .../crates/ide-diagnostics/src/tests.rs | 2 +- .../rust-analyzer/crates/ide-ssr/src/lib.rs | 4 +- src/tools/rust-analyzer/crates/ide/src/lib.rs | 2 +- .../rust-analyzer/crates/ide/src/rename.rs | 161 +++++++++++++++++- .../crates/rust-analyzer/src/global_state.rs | 6 +- 11 files changed, 186 insertions(+), 25 deletions(-) diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index ad17f1730bef7..b8eadb608fea5 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -206,6 +206,7 @@ impl EditionedFileId { #[salsa_macros::input(debug)] pub struct FileText { + #[returns(ref)] pub text: Arc, pub file_id: vfs::FileId, } @@ -357,7 +358,7 @@ fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse Option<&[SyntaxError]> { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 58ab7f470c40e..ec34461376165 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -890,7 +890,7 @@ fn include_str_expand( }; let text = db.file_text(file_id.file_id(db)); - let text = &*text.text(db); + let text = &**text.text(db); ExpandResult::ok(quote!(call_site =>#text)) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index 6730b337d356f..a7f3e27a45539 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -99,6 +99,16 @@ impl FileRange { pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper { FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range } } + + #[inline] + pub fn file_text(self, db: &dyn ExpandDatabase) -> &triomphe::Arc { + db.file_text(self.file_id.file_id(db)).text(db) + } + + #[inline] + pub fn text(self, db: &dyn ExpandDatabase) -> &str { + &self.file_text(db)[self.range] + } } /// `AstId` points to an AST node in any file. 
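The mechanical `&*text` -> `&**text` and `.clone()` adjustments in the hunks that follow all fall out of the `#[returns(ref)]` change above: judging by those call sites, the generated `text(db)` accessor now hands back a reference to the stored `Arc` rather than an owned copy, so callers that want a `&str` need one extra deref and callers that want ownership clone the `Arc` explicitly. A minimal standalone sketch of that deref arithmetic, using plain `std::sync::Arc` with no salsa involved:

use std::sync::Arc;

fn main() {
    let owned: Arc<str> = Arc::from("fn main() {}");
    let by_ref: &Arc<str> = &owned; // stand-in for what `file_text(..).text(db)` now returns

    // One `*` goes from `Arc<str>` to `str`, so a borrowed Arc needs two.
    let s1: &str = &*owned;
    let s2: &str = &**by_ref;
    assert_eq!(s1, s2);

    // Callers that need ownership bump the refcount, mirroring the added `.clone()` calls.
    let text: Arc<str> = by_ref.clone();
    assert_eq!(&*text, s1);
}
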
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs index b5de0e52f5b63..775136dc0cbf7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs @@ -149,7 +149,7 @@ impl TestDB { .into_iter() .filter_map(|file_id| { let text = self.file_text(file_id.file_id(self)); - let annotations = extract_annotations(&text.text(self)); + let annotations = extract_annotations(text.text(self)); if annotations.is_empty() { return None; } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index c94be7e164e27..49f7f63a04a42 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -244,7 +244,7 @@ pub trait LineIndexDatabase: base_db::RootQueryDb { fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc { let text = db.file_text(file_id).text(db); - Arc::new(LineIndex::new(&text)) + Arc::new(LineIndex::new(text)) } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 4dd64229d2748..abd4dc8300b39 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -487,9 +487,9 @@ impl<'a> FindUsages<'a> { scope.entries.iter().map(|(&file_id, &search_range)| { let text = db.file_text(file_id.file_id(db)).text(db); let search_range = - search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&**text))); - (text, file_id, search_range) + (text.clone(), file_id, search_range) }) } @@ -854,14 +854,7 @@ impl<'a> FindUsages<'a> { &finder, name, is_possibly_self.into_iter().map(|position| { - ( - self.sema - .db - .file_text(position.file_id.file_id(self.sema.db)) - .text(self.sema.db), - position.file_id, - position.range, - ) + (position.file_text(self.sema.db).clone(), position.file_id, position.range) }), |path, name_position| { let has_self = path @@ -1067,12 +1060,12 @@ impl<'a> FindUsages<'a> { let file_text = sema.db.file_text(file_id.file_id(self.sema.db)); let text = file_text.text(sema.db); let search_range = - search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&**text))); let tree = LazyCell::new(|| sema.parse(file_id).syntax().clone()); let finder = &Finder::new("self"); - for offset in Self::match_indices(&text, finder, search_range) { + for offset in Self::match_indices(text, finder, search_range) { for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset) .filter_map(ast::NameRef::cast) { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index 4e4bd47e1c2f2..181993154e59f 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -229,7 +229,7 @@ pub(crate) fn check_diagnostics_with_config( let line_index = db.line_index(file_id); let mut actual = annotations.remove(&file_id).unwrap_or_default(); - let mut expected = extract_annotations(&db.file_text(file_id).text(&db)); + let mut expected = extract_annotations(db.file_text(file_id).text(&db)); expected.sort_by_key(|(range, s)| (range.start(), 
s.clone())); actual.sort_by_key(|(range, s)| (range.start(), s.clone())); // FIXME: We should panic on duplicates instead, but includes currently cause us to report diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index 138af22089eb4..43ad12c1f699a 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -186,7 +186,7 @@ impl<'db> MatchFinder<'db> { replacing::matches_to_edit( self.sema.db, &matches, - &self.sema.db.file_text(file_id).text(self.sema.db), + self.sema.db.file_text(file_id).text(self.sema.db), &self.rules, ), ) @@ -228,7 +228,7 @@ impl<'db> MatchFinder<'db> { let file = self.sema.parse(file_id); let mut res = Vec::new(); let file_text = self.sema.db.file_text(file_id.file_id(self.sema.db)).text(self.sema.db); - let mut remaining_text = &*file_text; + let mut remaining_text = &**file_text; let mut base = 0; let len = snippet.len() as u32; while let Some(offset) = remaining_text.find(snippet) { diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index b3b8deb61fc0e..98877482ed863 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -299,7 +299,7 @@ impl Analysis { /// Gets the text of the source file. pub fn file_text(&self, file_id: FileId) -> Cancellable> { - self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db)) + self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db).clone()) } /// Gets the syntax tree of the file. diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index 634edaa5edaf0..aea4ae0fd9702 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -13,8 +13,11 @@ use ide_db::{ }; use itertools::Itertools; use std::fmt::Write; -use stdx::{always, never}; -use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast}; +use stdx::{always, format_to, never}; +use syntax::{ + AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, + ast::{self, HasArgList, prec::ExprPrecedence}, +}; use ide_db::text_edit::TextEdit; @@ -331,6 +334,85 @@ fn find_definitions( } } +fn transform_assoc_fn_into_method_call( + sema: &Semantics<'_, RootDatabase>, + source_change: &mut SourceChange, + f: hir::Function, +) { + let calls = Definition::Function(f).usages(sema).all(); + for (file_id, calls) in calls { + for call in calls { + let Some(fn_name) = call.name.as_name_ref() else { continue }; + let Some(path) = fn_name.syntax().parent().and_then(ast::PathSegment::cast) else { + continue; + }; + let path = path.parent_path(); + // The `PathExpr` is the direct parent, above it is the `CallExpr`. + let Some(call) = + path.syntax().parent().and_then(|it| ast::CallExpr::cast(it.parent()?)) + else { + continue; + }; + + let Some(arg_list) = call.arg_list() else { continue }; + let mut args = arg_list.args(); + let Some(mut self_arg) = args.next() else { continue }; + let second_arg = args.next(); + + // Strip (de)references, as they will be taken automatically by auto(de)ref. 
+ loop { + let self_ = match &self_arg { + ast::Expr::RefExpr(self_) => self_.expr(), + ast::Expr::ParenExpr(self_) => self_.expr(), + ast::Expr::PrefixExpr(self_) + if self_.op_kind() == Some(ast::UnaryOp::Deref) => + { + self_.expr() + } + _ => break, + }; + self_arg = match self_ { + Some(it) => it, + None => break, + }; + } + + let self_needs_parens = + self_arg.precedence().needs_parentheses_in(ExprPrecedence::Postfix); + + let replace_start = path.syntax().text_range().start(); + let replace_end = match second_arg { + Some(second_arg) => second_arg.syntax().text_range().start(), + None => arg_list + .r_paren_token() + .map(|it| it.text_range().start()) + .unwrap_or_else(|| arg_list.syntax().text_range().end()), + }; + let replace_range = TextRange::new(replace_start, replace_end); + + let Some(macro_mapped_self) = sema.original_range_opt(self_arg.syntax()) else { + continue; + }; + let mut replacement = String::new(); + if self_needs_parens { + replacement.push('('); + } + replacement.push_str(macro_mapped_self.text(sema.db)); + if self_needs_parens { + replacement.push(')'); + } + replacement.push('.'); + format_to!(replacement, "{fn_name}"); + replacement.push('('); + + source_change.insert_source_edit( + file_id.file_id(sema.db), + TextEdit::replace(replace_range, replacement), + ); + } + } +} + fn rename_to_self( sema: &Semantics<'_, RootDatabase>, local: hir::Local, @@ -408,6 +490,7 @@ fn rename_to_self( file_id.original_file(sema.db).file_id(sema.db), TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)), ); + transform_assoc_fn_into_method_call(sema, &mut source_change, fn_def); Ok(source_change) } @@ -3412,4 +3495,78 @@ fn other_place() { Quux::Bar$0; } "#, ); } + + #[test] + fn rename_to_self_callers() { + check( + "self", + r#" +//- minicore: add +struct Foo; +impl core::ops::Add for Foo { + type Target = Foo; + fn add(self, _: Self) -> Foo { Foo } +} + +impl Foo { + fn foo(th$0is: &Self) {} +} + +fn bar(v: &Foo) { + Foo::foo(v); +} + +fn baz() { + Foo::foo(&Foo); + Foo::foo(Foo + Foo); +} + "#, + r#" +struct Foo; +impl core::ops::Add for Foo { + type Target = Foo; + fn add(self, _: Self) -> Foo { Foo } +} + +impl Foo { + fn foo(&self) {} +} + +fn bar(v: &Foo) { + v.foo(); +} + +fn baz() { + Foo.foo(); + (Foo + Foo).foo(); +} + "#, + ); + // Multiple arguments: + check( + "self", + r#" +struct Foo; + +impl Foo { + fn foo(th$0is: &Self, v: i32) {} +} + +fn bar(v: Foo) { + Foo::foo(&v, 123); +} + "#, + r#" +struct Foo; + +impl Foo { + fn foo(&self, v: i32) {} +} + +fn bar(v: Foo) { + v.foo(123); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 3171bdd361785..2f1afba3634ef 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -448,7 +448,7 @@ impl GlobalState { tracing::info!(%vfs_path, ?change_kind, "Processing rust-analyzer.toml changes"); if vfs_path.as_path() == user_config_abs_path { tracing::info!(%vfs_path, ?change_kind, "Use config rust-analyzer.toml changes"); - change.change_user_config(Some(db.file_text(file_id).text(db))); + change.change_user_config(Some(db.file_text(file_id).text(db).clone())); } // If change has been made to a ratoml file that @@ -462,14 +462,14 @@ impl GlobalState { change.change_workspace_ratoml( source_root_id, vfs_path.clone(), - Some(db.file_text(file_id).text(db)), + 
Some(db.file_text(file_id).text(db).clone()), ) } else { tracing::info!(%vfs_path, ?source_root_id, "crate rust-analyzer.toml changes"); change.change_ratoml( source_root_id, vfs_path.clone(), - Some(db.file_text(file_id).text(db)), + Some(db.file_text(file_id).text(db).clone()), ) }; From 807d3406c215a9b3078e69ffc1a956b77f685ae9 Mon Sep 17 00:00:00 2001 From: Oneirical Date: Sun, 13 Jul 2025 16:06:48 -0400 Subject: [PATCH 079/118] Rehome tests/ui/issues/ tests [2/?] --- ...t-trait-item-reference-selection-26095.rs} | 2 +- .../cold-attribute-application-54044.rs} | 1 + .../cold-attribute-application-54044.stderr} | 6 ++--- .../autoderef-vec-box-fn-36786.rs} | 1 + .../borrow-checker-lifetime-error-46471.rs} | 1 + ...orrow-checker-lifetime-error-46471.stderr} | 2 +- ...tring-borrowing-pattern-matching-11869.rs} | 1 + .../u8-to-char-cast-9918.rs} | 1 + ...miscompile-metadata-invalidation-36023.rs} | 1 + ...herence-error-for-undefined-type-18058.rs} | 1 + ...nce-error-for-undefined-type-18058.stderr} | 2 +- ...lution-error-with-const-generics-77919.rs} | 1 + ...on-error-with-const-generics-77919.stderr} | 6 ++--- .../destructor-run-for-expression-4734.rs} | 1 + .../unnecessary-path-disambiguator-36116.rs} | 1 + .../collection-type-copy-behavior-12909.rs} | 1 + tests/ui/issues/auxiliary/issue-25185-2.rs | 3 --- tests/ui/issues/issue-25185.rs | 12 --------- tests/ui/issues/issue-32655.rs | 19 -------------- tests/ui/issues/issue-32655.stderr | 25 ------------------- .../iterator-type-inference-sum-15673.rs} | 1 + .../auxiliary/aux-25185-1.rs} | 0 tests/ui/linking/auxiliary/aux-25185-2.rs | 3 +++ ...ib-to-dylib-native-deps-inclusion-25185.rs | 13 ++++++++++ .../invalid-assignment-in-macro-26093.rs} | 1 + .../invalid-assignment-in-macro-26093.stderr} | 4 +-- ...ern-matching-in-function-argument-7519.rs} | 1 + ...-associated-type-in-trait-object-22434.rs} | 1 + ...ociated-type-in-trait-object-22434.stderr} | 2 +- ...ncorrect-self-type-in-trait-impl-48276.rs} | 1 + ...rect-self-type-in-trait-impl-48276.stderr} | 6 ++--- ...enthesized-type-parameters-error-32995.rs} | 1 + ...esized-type-parameters-error-32995.stderr} | 6 ++--- ...pace-conflict-in-unboxed-closure-18685.rs} | 2 +- 34 files changed, 52 insertions(+), 78 deletions(-) rename tests/ui/{issues/issue-26095.rs => associated-consts/constant-trait-item-reference-selection-26095.rs} (85%) rename tests/ui/{issues/issue-54044.rs => attributes/cold-attribute-application-54044.rs} (91%) rename tests/ui/{issues/issue-54044.stderr => attributes/cold-attribute-application-54044.stderr} (82%) rename tests/ui/{issues/issue-36786-resolve-call.rs => autoref-autoderef/autoderef-vec-box-fn-36786.rs} (77%) rename tests/ui/{issues/issue-46471-1.rs => borrowck/borrow-checker-lifetime-error-46471.rs} (75%) rename tests/ui/{issues/issue-46471-1.stderr => borrowck/borrow-checker-lifetime-error-46471.stderr} (87%) rename tests/ui/{issues/issue-11869.rs => borrowck/string-borrowing-pattern-matching-11869.rs} (81%) rename tests/ui/{issues/issue-9918.rs => cast/u8-to-char-cast-9918.rs} (59%) rename tests/ui/{issues/issue-36023.rs => codegen/llvm-miscompile-metadata-invalidation-36023.rs} (91%) rename tests/ui/{issues/issue-18058.rs => coherence/impl-coherence-error-for-undefined-type-18058.rs} (63%) rename tests/ui/{issues/issue-18058.stderr => coherence/impl-coherence-error-for-undefined-type-18058.stderr} (78%) rename tests/ui/{issues/issue-77919.rs => const-generics/trait-resolution-error-with-const-generics-77919.rs} (88%) rename 
tests/ui/{issues/issue-77919.stderr => const-generics/trait-resolution-error-with-const-generics-77919.stderr} (84%) rename tests/ui/{issues/issue-4734.rs => drop/destructor-run-for-expression-4734.rs} (94%) rename tests/ui/{issues/issue-36116.rs => generics/unnecessary-path-disambiguator-36116.rs} (86%) rename tests/ui/{issues/issue-12909.rs => inference/collection-type-copy-behavior-12909.rs} (90%) delete mode 100644 tests/ui/issues/auxiliary/issue-25185-2.rs delete mode 100644 tests/ui/issues/issue-25185.rs delete mode 100644 tests/ui/issues/issue-32655.rs delete mode 100644 tests/ui/issues/issue-32655.stderr rename tests/ui/{issues/issue-15673.rs => iterators/iterator-type-inference-sum-15673.rs} (76%) rename tests/ui/{issues/auxiliary/issue-25185-1.rs => linking/auxiliary/aux-25185-1.rs} (100%) create mode 100644 tests/ui/linking/auxiliary/aux-25185-2.rs create mode 100644 tests/ui/linking/rlib-to-dylib-native-deps-inclusion-25185.rs rename tests/ui/{issues/issue-26093.rs => macros/invalid-assignment-in-macro-26093.rs} (83%) rename tests/ui/{issues/issue-26093.stderr => macros/invalid-assignment-in-macro-26093.stderr} (90%) rename tests/ui/{issues/issue-7519-match-unit-in-arg.rs => pattern/unit-pattern-matching-in-function-argument-7519.rs} (72%) rename tests/ui/{issues/issue-22434.rs => type-alias/missing-associated-type-in-trait-object-22434.rs} (75%) rename tests/ui/{issues/issue-22434.stderr => type-alias/missing-associated-type-in-trait-object-22434.stderr} (84%) rename tests/ui/{issues/issue-48276.rs => typeck/incorrect-self-type-in-trait-impl-48276.rs} (93%) rename tests/ui/{issues/issue-48276.stderr => typeck/incorrect-self-type-in-trait-impl-48276.stderr} (83%) rename tests/ui/{issues/issue-32995-2.rs => typeck/parenthesized-type-parameters-error-32995.rs} (89%) rename tests/ui/{issues/issue-32995-2.stderr => typeck/parenthesized-type-parameters-error-32995.stderr} (79%) rename tests/ui/{issues/issue-18685.rs => unboxed-closures/self-param-space-conflict-in-unboxed-closure-18685.rs} (85%) diff --git a/tests/ui/issues/issue-26095.rs b/tests/ui/associated-consts/constant-trait-item-reference-selection-26095.rs similarity index 85% rename from tests/ui/issues/issue-26095.rs rename to tests/ui/associated-consts/constant-trait-item-reference-selection-26095.rs index 34c617dc495a7..f0fe2db432bc9 100644 --- a/tests/ui/issues/issue-26095.rs +++ b/tests/ui/associated-consts/constant-trait-item-reference-selection-26095.rs @@ -1,8 +1,8 @@ +// https://github.com/rust-lang/rust/issues/26095 //@ check-pass #![allow(dead_code)] #![allow(non_upper_case_globals)] - trait HasNumber { const Number: usize; } diff --git a/tests/ui/issues/issue-54044.rs b/tests/ui/attributes/cold-attribute-application-54044.rs similarity index 91% rename from tests/ui/issues/issue-54044.rs rename to tests/ui/attributes/cold-attribute-application-54044.rs index 809ea7a87dbea..2e644b91c0774 100644 --- a/tests/ui/issues/issue-54044.rs +++ b/tests/ui/attributes/cold-attribute-application-54044.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/54044 #![deny(unused_attributes)] //~ NOTE lint level is defined here #[cold] diff --git a/tests/ui/issues/issue-54044.stderr b/tests/ui/attributes/cold-attribute-application-54044.stderr similarity index 82% rename from tests/ui/issues/issue-54044.stderr rename to tests/ui/attributes/cold-attribute-application-54044.stderr index 8bd94a041d0c2..efdf5e0de527f 100644 --- a/tests/ui/issues/issue-54044.stderr +++ 
b/tests/ui/attributes/cold-attribute-application-54044.stderr @@ -1,5 +1,5 @@ error: attribute should be applied to a function definition - --> $DIR/issue-54044.rs:3:1 + --> $DIR/cold-attribute-application-54044.rs:4:1 | LL | #[cold] | ^^^^^^^ @@ -9,13 +9,13 @@ LL | struct Foo; | = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! note: the lint level is defined here - --> $DIR/issue-54044.rs:1:9 + --> $DIR/cold-attribute-application-54044.rs:2:9 | LL | #![deny(unused_attributes)] | ^^^^^^^^^^^^^^^^^ error: attribute should be applied to a function definition - --> $DIR/issue-54044.rs:9:5 + --> $DIR/cold-attribute-application-54044.rs:10:5 | LL | #[cold] | ^^^^^^^ diff --git a/tests/ui/issues/issue-36786-resolve-call.rs b/tests/ui/autoref-autoderef/autoderef-vec-box-fn-36786.rs similarity index 77% rename from tests/ui/issues/issue-36786-resolve-call.rs rename to tests/ui/autoref-autoderef/autoderef-vec-box-fn-36786.rs index de7b0e18d5210..e16929bf48acc 100644 --- a/tests/ui/issues/issue-36786-resolve-call.rs +++ b/tests/ui/autoref-autoderef/autoderef-vec-box-fn-36786.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/36786 //@ run-pass // Ensure that types that rely on obligations are autoderefed // correctly diff --git a/tests/ui/issues/issue-46471-1.rs b/tests/ui/borrowck/borrow-checker-lifetime-error-46471.rs similarity index 75% rename from tests/ui/issues/issue-46471-1.rs rename to tests/ui/borrowck/borrow-checker-lifetime-error-46471.rs index aa161d40f702d..020b02aa34df7 100644 --- a/tests/ui/issues/issue-46471-1.rs +++ b/tests/ui/borrowck/borrow-checker-lifetime-error-46471.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/46471 fn main() { let y = { let mut z = 0; diff --git a/tests/ui/issues/issue-46471-1.stderr b/tests/ui/borrowck/borrow-checker-lifetime-error-46471.stderr similarity index 87% rename from tests/ui/issues/issue-46471-1.stderr rename to tests/ui/borrowck/borrow-checker-lifetime-error-46471.stderr index d45172239820d..c90da55162009 100644 --- a/tests/ui/issues/issue-46471-1.stderr +++ b/tests/ui/borrowck/borrow-checker-lifetime-error-46471.stderr @@ -1,5 +1,5 @@ error[E0597]: `z` does not live long enough - --> $DIR/issue-46471-1.rs:4:9 + --> $DIR/borrow-checker-lifetime-error-46471.rs:5:9 | LL | let mut z = 0; | ----- binding `z` declared here diff --git a/tests/ui/issues/issue-11869.rs b/tests/ui/borrowck/string-borrowing-pattern-matching-11869.rs similarity index 81% rename from tests/ui/issues/issue-11869.rs rename to tests/ui/borrowck/string-borrowing-pattern-matching-11869.rs index dd752227bbec7..fe3d1bf6e8a30 100644 --- a/tests/ui/issues/issue-11869.rs +++ b/tests/ui/borrowck/string-borrowing-pattern-matching-11869.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/11869 //@ check-pass #![allow(dead_code)] diff --git a/tests/ui/issues/issue-9918.rs b/tests/ui/cast/u8-to-char-cast-9918.rs similarity index 59% rename from tests/ui/issues/issue-9918.rs rename to tests/ui/cast/u8-to-char-cast-9918.rs index 017e833aefb25..2b8be1f0fc9b9 100644 --- a/tests/ui/issues/issue-9918.rs +++ b/tests/ui/cast/u8-to-char-cast-9918.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/9918 //@ run-pass pub fn main() { diff --git a/tests/ui/issues/issue-36023.rs b/tests/ui/codegen/llvm-miscompile-metadata-invalidation-36023.rs similarity index 91% rename from tests/ui/issues/issue-36023.rs rename to 
tests/ui/codegen/llvm-miscompile-metadata-invalidation-36023.rs index 32e8af65c7d11..efa31a51881a4 100644 --- a/tests/ui/issues/issue-36023.rs +++ b/tests/ui/codegen/llvm-miscompile-metadata-invalidation-36023.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/36023 //@ run-pass #![allow(unused_variables)] use std::ops::Deref; diff --git a/tests/ui/issues/issue-18058.rs b/tests/ui/coherence/impl-coherence-error-for-undefined-type-18058.rs similarity index 63% rename from tests/ui/issues/issue-18058.rs rename to tests/ui/coherence/impl-coherence-error-for-undefined-type-18058.rs index cced66717e1bf..52baf9871c3bc 100644 --- a/tests/ui/issues/issue-18058.rs +++ b/tests/ui/coherence/impl-coherence-error-for-undefined-type-18058.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/18058 impl Undefined {} //~^ ERROR cannot find type `Undefined` in this scope diff --git a/tests/ui/issues/issue-18058.stderr b/tests/ui/coherence/impl-coherence-error-for-undefined-type-18058.stderr similarity index 78% rename from tests/ui/issues/issue-18058.stderr rename to tests/ui/coherence/impl-coherence-error-for-undefined-type-18058.stderr index c880bb0029192..07dce0b04fd53 100644 --- a/tests/ui/issues/issue-18058.stderr +++ b/tests/ui/coherence/impl-coherence-error-for-undefined-type-18058.stderr @@ -1,5 +1,5 @@ error[E0412]: cannot find type `Undefined` in this scope - --> $DIR/issue-18058.rs:1:6 + --> $DIR/impl-coherence-error-for-undefined-type-18058.rs:2:6 | LL | impl Undefined {} | ^^^^^^^^^ not found in this scope diff --git a/tests/ui/issues/issue-77919.rs b/tests/ui/const-generics/trait-resolution-error-with-const-generics-77919.rs similarity index 88% rename from tests/ui/issues/issue-77919.rs rename to tests/ui/const-generics/trait-resolution-error-with-const-generics-77919.rs index bf603314977f9..5ab443422df7a 100644 --- a/tests/ui/issues/issue-77919.rs +++ b/tests/ui/const-generics/trait-resolution-error-with-const-generics-77919.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/77919 fn main() { [1; >::VAL]; } diff --git a/tests/ui/issues/issue-77919.stderr b/tests/ui/const-generics/trait-resolution-error-with-const-generics-77919.stderr similarity index 84% rename from tests/ui/issues/issue-77919.stderr rename to tests/ui/const-generics/trait-resolution-error-with-const-generics-77919.stderr index dbbe70ff06990..bac8abf46dce9 100644 --- a/tests/ui/issues/issue-77919.stderr +++ b/tests/ui/const-generics/trait-resolution-error-with-const-generics-77919.stderr @@ -1,5 +1,5 @@ error[E0412]: cannot find type `PhantomData` in this scope - --> $DIR/issue-77919.rs:9:9 + --> $DIR/trait-resolution-error-with-const-generics-77919.rs:10:9 | LL | _n: PhantomData, | ^^^^^^^^^^^ not found in this scope @@ -10,7 +10,7 @@ LL + use std::marker::PhantomData; | error[E0412]: cannot find type `VAL` in this scope - --> $DIR/issue-77919.rs:11:63 + --> $DIR/trait-resolution-error-with-const-generics-77919.rs:12:63 | LL | impl TypeVal for Multiply where N: TypeVal {} | ^^^ not found in this scope @@ -21,7 +21,7 @@ LL | impl TypeVal for Multiply where N: TypeVal {} | +++++ error[E0046]: not all trait items implemented, missing: `VAL` - --> $DIR/issue-77919.rs:11:1 + --> $DIR/trait-resolution-error-with-const-generics-77919.rs:12:1 | LL | const VAL: T; | ------------ `VAL` from trait diff --git a/tests/ui/issues/issue-4734.rs b/tests/ui/drop/destructor-run-for-expression-4734.rs similarity index 94% rename from tests/ui/issues/issue-4734.rs rename to 
tests/ui/drop/destructor-run-for-expression-4734.rs index 58aa0179693e7..57971ee5ef76e 100644 --- a/tests/ui/issues/issue-4734.rs +++ b/tests/ui/drop/destructor-run-for-expression-4734.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/4734 //@ run-pass #![allow(dead_code)] // Ensures that destructors are run for expressions of the form "e;" where diff --git a/tests/ui/issues/issue-36116.rs b/tests/ui/generics/unnecessary-path-disambiguator-36116.rs similarity index 86% rename from tests/ui/issues/issue-36116.rs rename to tests/ui/generics/unnecessary-path-disambiguator-36116.rs index 2313e189aff7d..c2dab605f5923 100644 --- a/tests/ui/issues/issue-36116.rs +++ b/tests/ui/generics/unnecessary-path-disambiguator-36116.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/36116 // Unnecessary path disambiguator is ok //@ check-pass diff --git a/tests/ui/issues/issue-12909.rs b/tests/ui/inference/collection-type-copy-behavior-12909.rs similarity index 90% rename from tests/ui/issues/issue-12909.rs rename to tests/ui/inference/collection-type-copy-behavior-12909.rs index f2c33806aae88..83536e8875cad 100644 --- a/tests/ui/issues/issue-12909.rs +++ b/tests/ui/inference/collection-type-copy-behavior-12909.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/12909 //@ run-pass #![allow(unused_variables)] diff --git a/tests/ui/issues/auxiliary/issue-25185-2.rs b/tests/ui/issues/auxiliary/issue-25185-2.rs deleted file mode 100644 index 7ce3df255a331..0000000000000 --- a/tests/ui/issues/auxiliary/issue-25185-2.rs +++ /dev/null @@ -1,3 +0,0 @@ -extern crate issue_25185_1; - -pub use issue_25185_1::rust_dbg_extern_identity_u32; diff --git a/tests/ui/issues/issue-25185.rs b/tests/ui/issues/issue-25185.rs deleted file mode 100644 index 7dc06ad96df66..0000000000000 --- a/tests/ui/issues/issue-25185.rs +++ /dev/null @@ -1,12 +0,0 @@ -//@ run-pass -//@ aux-build:issue-25185-1.rs -//@ aux-build:issue-25185-2.rs - -extern crate issue_25185_2; - -fn main() { - let x = unsafe { - issue_25185_2::rust_dbg_extern_identity_u32(1) - }; - assert_eq!(x, 1); -} diff --git a/tests/ui/issues/issue-32655.rs b/tests/ui/issues/issue-32655.rs deleted file mode 100644 index f52e092312968..0000000000000 --- a/tests/ui/issues/issue-32655.rs +++ /dev/null @@ -1,19 +0,0 @@ -macro_rules! foo ( - () => ( - #[derive_Clone] //~ ERROR cannot find attribute `derive_Clone` in this scope - struct T; - ); -); - -macro_rules! bar ( - ($e:item) => ($e) -); - -foo!(); - -bar!( - #[derive_Clone] //~ ERROR cannot find attribute `derive_Clone` in this scope - struct S; -); - -fn main() {} diff --git a/tests/ui/issues/issue-32655.stderr b/tests/ui/issues/issue-32655.stderr deleted file mode 100644 index b8362499b2d0a..0000000000000 --- a/tests/ui/issues/issue-32655.stderr +++ /dev/null @@ -1,25 +0,0 @@ -error: cannot find attribute `derive_Clone` in this scope - --> $DIR/issue-32655.rs:3:11 - | -LL | #[derive_Clone] - | ^^^^^^^^^^^^ help: an attribute macro with a similar name exists: `derive_const` -... 
-LL | foo!(); - | ------ in this macro invocation - --> $SRC_DIR/core/src/macros/mod.rs:LL:COL - | - = note: similarly named attribute macro `derive_const` defined here - | - = note: this error originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info) - -error: cannot find attribute `derive_Clone` in this scope - --> $DIR/issue-32655.rs:15:7 - | -LL | #[derive_Clone] - | ^^^^^^^^^^^^ help: an attribute macro with a similar name exists: `derive_const` - --> $SRC_DIR/core/src/macros/mod.rs:LL:COL - | - = note: similarly named attribute macro `derive_const` defined here - -error: aborting due to 2 previous errors - diff --git a/tests/ui/issues/issue-15673.rs b/tests/ui/iterators/iterator-type-inference-sum-15673.rs similarity index 76% rename from tests/ui/issues/issue-15673.rs rename to tests/ui/iterators/iterator-type-inference-sum-15673.rs index bb61c24627643..aee027927f2f7 100644 --- a/tests/ui/issues/issue-15673.rs +++ b/tests/ui/iterators/iterator-type-inference-sum-15673.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/15673 //@ run-pass #![allow(stable_features)] diff --git a/tests/ui/issues/auxiliary/issue-25185-1.rs b/tests/ui/linking/auxiliary/aux-25185-1.rs similarity index 100% rename from tests/ui/issues/auxiliary/issue-25185-1.rs rename to tests/ui/linking/auxiliary/aux-25185-1.rs diff --git a/tests/ui/linking/auxiliary/aux-25185-2.rs b/tests/ui/linking/auxiliary/aux-25185-2.rs new file mode 100644 index 0000000000000..96c73f623e4d0 --- /dev/null +++ b/tests/ui/linking/auxiliary/aux-25185-2.rs @@ -0,0 +1,3 @@ +extern crate aux_25185_1; + +pub use aux_25185_1::rust_dbg_extern_identity_u32; diff --git a/tests/ui/linking/rlib-to-dylib-native-deps-inclusion-25185.rs b/tests/ui/linking/rlib-to-dylib-native-deps-inclusion-25185.rs new file mode 100644 index 0000000000000..bbcfcb75106f6 --- /dev/null +++ b/tests/ui/linking/rlib-to-dylib-native-deps-inclusion-25185.rs @@ -0,0 +1,13 @@ +// https://github.com/rust-lang/rust/issues/25185 +//@ run-pass +//@ aux-build:aux-25185-1.rs +//@ aux-build:aux-25185-2.rs + +extern crate aux_25185_2; + +fn main() { + let x = unsafe { + aux_25185_2::rust_dbg_extern_identity_u32(1) + }; + assert_eq!(x, 1); +} diff --git a/tests/ui/issues/issue-26093.rs b/tests/ui/macros/invalid-assignment-in-macro-26093.rs similarity index 83% rename from tests/ui/issues/issue-26093.rs rename to tests/ui/macros/invalid-assignment-in-macro-26093.rs index c838515caf997..686a13a3eec36 100644 --- a/tests/ui/issues/issue-26093.rs +++ b/tests/ui/macros/invalid-assignment-in-macro-26093.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/26093 macro_rules! 
not_a_place { ($thing:expr) => { $thing = 42; diff --git a/tests/ui/issues/issue-26093.stderr b/tests/ui/macros/invalid-assignment-in-macro-26093.stderr similarity index 90% rename from tests/ui/issues/issue-26093.stderr rename to tests/ui/macros/invalid-assignment-in-macro-26093.stderr index 1a08d0fef4118..99f188c718361 100644 --- a/tests/ui/issues/issue-26093.stderr +++ b/tests/ui/macros/invalid-assignment-in-macro-26093.stderr @@ -1,5 +1,5 @@ error[E0070]: invalid left-hand side of assignment - --> $DIR/issue-26093.rs:3:16 + --> $DIR/invalid-assignment-in-macro-26093.rs:4:16 | LL | $thing = 42; | ^ @@ -13,7 +13,7 @@ LL | not_a_place!(99); = note: this error originates in the macro `not_a_place` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0067]: invalid left-hand side of assignment - --> $DIR/issue-26093.rs:5:16 + --> $DIR/invalid-assignment-in-macro-26093.rs:6:16 | LL | $thing += 42; | ^^ diff --git a/tests/ui/issues/issue-7519-match-unit-in-arg.rs b/tests/ui/pattern/unit-pattern-matching-in-function-argument-7519.rs similarity index 72% rename from tests/ui/issues/issue-7519-match-unit-in-arg.rs rename to tests/ui/pattern/unit-pattern-matching-in-function-argument-7519.rs index a7cea577b2248..7bfa9ee66256a 100644 --- a/tests/ui/issues/issue-7519-match-unit-in-arg.rs +++ b/tests/ui/pattern/unit-pattern-matching-in-function-argument-7519.rs @@ -2,6 +2,7 @@ /* #7519 ICE pattern matching unit in function argument +https://github.com/rust-lang/rust/issues/7519 */ fn foo(():()) { } diff --git a/tests/ui/issues/issue-22434.rs b/tests/ui/type-alias/missing-associated-type-in-trait-object-22434.rs similarity index 75% rename from tests/ui/issues/issue-22434.rs rename to tests/ui/type-alias/missing-associated-type-in-trait-object-22434.rs index d9f7b987c64fe..35b30374c15cc 100644 --- a/tests/ui/issues/issue-22434.rs +++ b/tests/ui/type-alias/missing-associated-type-in-trait-object-22434.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/22434 pub trait Foo { type A; } diff --git a/tests/ui/issues/issue-22434.stderr b/tests/ui/type-alias/missing-associated-type-in-trait-object-22434.stderr similarity index 84% rename from tests/ui/issues/issue-22434.stderr rename to tests/ui/type-alias/missing-associated-type-in-trait-object-22434.stderr index 172ae386c3e4a..73afefa5a1fdb 100644 --- a/tests/ui/issues/issue-22434.stderr +++ b/tests/ui/type-alias/missing-associated-type-in-trait-object-22434.stderr @@ -1,5 +1,5 @@ error[E0191]: the value of the associated type `A` in `Foo` must be specified - --> $DIR/issue-22434.rs:5:23 + --> $DIR/missing-associated-type-in-trait-object-22434.rs:6:23 | LL | type A; | ------ `A` defined here diff --git a/tests/ui/issues/issue-48276.rs b/tests/ui/typeck/incorrect-self-type-in-trait-impl-48276.rs similarity index 93% rename from tests/ui/issues/issue-48276.rs rename to tests/ui/typeck/incorrect-self-type-in-trait-impl-48276.rs index f55c056fa67d2..1cff20787550c 100644 --- a/tests/ui/issues/issue-48276.rs +++ b/tests/ui/typeck/incorrect-self-type-in-trait-impl-48276.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/48276 // Regression test for issue #48276 - ICE when self type does not match what is // required by a trait and regions are involved. 
diff --git a/tests/ui/issues/issue-48276.stderr b/tests/ui/typeck/incorrect-self-type-in-trait-impl-48276.stderr similarity index 83% rename from tests/ui/issues/issue-48276.stderr rename to tests/ui/typeck/incorrect-self-type-in-trait-impl-48276.stderr index 370905ee0dfb1..124dc45923764 100644 --- a/tests/ui/issues/issue-48276.stderr +++ b/tests/ui/typeck/incorrect-self-type-in-trait-impl-48276.stderr @@ -1,5 +1,5 @@ error[E0185]: method `from` has a `&self` declaration in the impl, but not in the trait - --> $DIR/issue-48276.rs:11:5 + --> $DIR/incorrect-self-type-in-trait-impl-48276.rs:12:5 | LL | fn from(a: A) -> Self; | ---------------------- trait method declared without `&self` @@ -8,7 +8,7 @@ LL | fn from(self: &'a Self) -> &'b str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `&self` used in impl error[E0185]: method `from` has a `&self` declaration in the impl, but not in the trait - --> $DIR/issue-48276.rs:20:5 + --> $DIR/incorrect-self-type-in-trait-impl-48276.rs:21:5 | LL | fn from(&self) -> B { | ^^^^^^^^^^^^^^^^^^^ `&self` used in impl @@ -16,7 +16,7 @@ LL | fn from(&self) -> B { = note: `from` from trait: `fn(T) -> Self` error[E0185]: method `from` has a `&self` declaration in the impl, but not in the trait - --> $DIR/issue-48276.rs:27:5 + --> $DIR/incorrect-self-type-in-trait-impl-48276.rs:28:5 | LL | fn from(&self) -> &'static str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `&self` used in impl diff --git a/tests/ui/issues/issue-32995-2.rs b/tests/ui/typeck/parenthesized-type-parameters-error-32995.rs similarity index 89% rename from tests/ui/issues/issue-32995-2.rs rename to tests/ui/typeck/parenthesized-type-parameters-error-32995.rs index e713a64d3f5a6..e0c2ab5f303a3 100644 --- a/tests/ui/issues/issue-32995-2.rs +++ b/tests/ui/typeck/parenthesized-type-parameters-error-32995.rs @@ -1,3 +1,4 @@ +// https://github.com/rust-lang/rust/issues/32995 fn main() { { fn f() {} } //~^ ERROR parenthesized type parameters may only be used with a `Fn` trait diff --git a/tests/ui/issues/issue-32995-2.stderr b/tests/ui/typeck/parenthesized-type-parameters-error-32995.stderr similarity index 79% rename from tests/ui/issues/issue-32995-2.stderr rename to tests/ui/typeck/parenthesized-type-parameters-error-32995.stderr index 6c2d772a23332..590cdcdb43bc3 100644 --- a/tests/ui/issues/issue-32995-2.stderr +++ b/tests/ui/typeck/parenthesized-type-parameters-error-32995.stderr @@ -1,17 +1,17 @@ error[E0214]: parenthesized type parameters may only be used with a `Fn` trait - --> $DIR/issue-32995-2.rs:2:22 + --> $DIR/parenthesized-type-parameters-error-32995.rs:3:22 | LL | { fn f() {} } | ^^^^^^^^ only `Fn` traits may use parentheses error[E0214]: parenthesized type parameters may only be used with a `Fn` trait - --> $DIR/issue-32995-2.rs:5:29 + --> $DIR/parenthesized-type-parameters-error-32995.rs:6:29 | LL | { fn f() -> impl ::std::marker()::Send { } } | ^^^^^^^^ only `Fn` traits may use parentheses error[E0214]: parenthesized type parameters may only be used with a `Fn` trait - --> $DIR/issue-32995-2.rs:12:13 + --> $DIR/parenthesized-type-parameters-error-32995.rs:13:13 | LL | impl ::std::marker()::Copy for X {} | ^^^^^^^^ only `Fn` traits may use parentheses diff --git a/tests/ui/issues/issue-18685.rs b/tests/ui/unboxed-closures/self-param-space-conflict-in-unboxed-closure-18685.rs similarity index 85% rename from tests/ui/issues/issue-18685.rs rename to tests/ui/unboxed-closures/self-param-space-conflict-in-unboxed-closure-18685.rs index 3dab341f615c7..38cf26c277709 100644 --- 
a/tests/ui/issues/issue-18685.rs +++ b/tests/ui/unboxed-closures/self-param-space-conflict-in-unboxed-closure-18685.rs @@ -1,8 +1,8 @@ +// https://github.com/rust-lang/rust/issues/18685 //@ run-pass // Test that the self param space is not used in a conflicting // manner by unboxed closures within a default method on a trait - trait Tr { fn foo(&self); From f84c62112a3f86ac0e0916db07d70b391ce7db30 Mon Sep 17 00:00:00 2001 From: Tshepang Mbambo Date: Sun, 3 Aug 2025 07:18:26 +0200 Subject: [PATCH 080/118] there is still no official policy --- src/doc/rustc-dev-guide/src/crates-io.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/doc/rustc-dev-guide/src/crates-io.md b/src/doc/rustc-dev-guide/src/crates-io.md index 4431585a2f02b..677b1fc03134e 100644 --- a/src/doc/rustc-dev-guide/src/crates-io.md +++ b/src/doc/rustc-dev-guide/src/crates-io.md @@ -11,7 +11,7 @@ you should avoid adding dependencies to the compiler for several reasons: - The dependency may have transitive dependencies that have one of the above problems. - + Note that there is no official policy for vetting new dependencies to the compiler. Decisions are made on a case-by-case basis, during code review. From b373cb100642ae0f154090ae57c74cc834ec0327 Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Sun, 3 Aug 2025 15:01:47 +0900 Subject: [PATCH 081/118] Migrate `generate_trait_from_impl` assist to use `SyntaxEditor` --- .../src/handlers/generate_trait_from_impl.rs | 69 ++++++++++--------- 1 file changed, 35 insertions(+), 34 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index dc3dc73701f3a..56500cf068024 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -2,12 +2,8 @@ use crate::assist_context::{AssistContext, Assists}; use ide_db::assists::AssistId; use syntax::{ AstNode, SyntaxKind, T, - ast::{ - self, HasGenericParams, HasName, HasVisibility, - edit_in_place::{HasVisibilityEdit, Indent}, - make, - }, - ted::{self, Position}, + ast::{self, HasGenericParams, HasName, HasVisibility, edit_in_place::Indent, make}, + syntax_editor::{Position, SyntaxEditor}, }; // NOTES : @@ -88,8 +84,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ return None; } - let assoc_items = impl_ast.assoc_item_list()?; - let first_element = assoc_items.assoc_items().next(); + let impl_assoc_items = impl_ast.assoc_item_list()?; + let first_element = impl_assoc_items.assoc_items().next(); first_element.as_ref()?; let impl_name = impl_ast.self_ty()?; @@ -99,20 +95,16 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ "Generate trait from impl", impl_ast.syntax().text_range(), |builder| { - let impl_ast = builder.make_mut(impl_ast); - let trait_items = assoc_items.clone_for_update(); - let impl_items = builder.make_mut(assoc_items); - let impl_name = builder.make_mut(impl_name); - - trait_items.assoc_items().for_each(|item| { - strip_body(&item); - remove_items_visibility(&item); - }); - - impl_items.assoc_items().for_each(|item| { - remove_items_visibility(&item); - }); - + let trait_items: ast::AssocItemList = { + let trait_items = impl_assoc_items.clone_subtree(); + let mut trait_items_editor = SyntaxEditor::new(trait_items.syntax().clone()); + + 
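To make the invariant behind the new comments concrete: each `unreachable!()` replaces an `Ok(())` (or `Err(fmt::Error)`) body in a method that, for that particular implementation, can never be invoked — either because the implementation's `should_print_region` returns `false`, or because its `path_generic_args` ignores the generic args that would otherwise be printed. A minimal self-contained sketch of the first shape of that pattern follows; the trait below is a simplified stand-in, not the real `rustc_middle` `Printer`:

use std::fmt;

trait Printer {
    // Callers consult this before printing a region.
    fn should_print_region(&self) -> bool {
        true
    }

    fn print_region(&mut self, region: &str) -> Result<(), fmt::Error>;

    // The only path to `print_region` in this sketch is guarded, which is
    // what justifies `unreachable!()` in implementations that opt out.
    fn maybe_print_region(&mut self, region: &str) -> Result<(), fmt::Error> {
        if self.should_print_region() { self.print_region(region) } else { Ok(()) }
    }
}

struct SymbolPrinter;

impl Printer for SymbolPrinter {
    fn should_print_region(&self) -> bool {
        false
    }

    fn print_region(&mut self, _region: &str) -> Result<(), fmt::Error> {
        // Fails loudly if the guard is ever bypassed, instead of silently
        // pretending the region was printed.
        unreachable!(); // because `should_print_region` returns false
    }
}

fn main() {
    let mut p = SymbolPrinter;
    assert_eq!(p.maybe_print_region("'static"), Ok(()));
}

Documenting why a body is dead at its definition, rather than leaving a do-nothing `Ok(())`, is the readability win the commit message describes: a reader no longer has to wonder which of these printer methods are ever exercised.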
trait_items.assoc_items().for_each(|item| { + strip_body(&mut trait_items_editor, &item); + remove_items_visibility(&mut trait_items_editor, &item); + }); + ast::AssocItemList::cast(trait_items_editor.finish().new_root().clone()).unwrap() + }; let trait_ast = make::trait_( false, "NewTrait", @@ -130,6 +122,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ trait_name_ref.syntax().clone().into(), make::tokens::single_space().into(), make::token(T![for]).into(), + make::tokens::single_space().into(), ]; if let Some(params) = impl_ast.generic_param_list() { @@ -137,10 +130,15 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ elements.insert(1, gen_args.syntax().clone().into()); } - ted::insert_all(Position::before(impl_name.syntax()), elements); + let mut editor = builder.make_editor(impl_ast.syntax()); + impl_assoc_items.assoc_items().for_each(|item| { + remove_items_visibility(&mut editor, &item); + }); + + editor.insert_all(Position::before(impl_name.syntax()), elements); // Insert trait before TraitImpl - ted::insert_all_raw( + editor.insert_all( Position::before(impl_ast.syntax()), vec![ trait_ast.syntax().clone().into(), @@ -150,11 +148,12 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ // Link the trait name & trait ref names together as a placeholder snippet group if let Some(cap) = ctx.config.snippet_cap { - builder.add_placeholder_snippet_group( - cap, - vec![trait_name.syntax().clone(), trait_name_ref.syntax().clone()], - ); + let placeholder = builder.make_placeholder_snippet(cap); + editor.add_annotation(trait_name.syntax(), placeholder); + editor.add_annotation(trait_name_ref.syntax(), placeholder); } + + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ); @@ -162,19 +161,21 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ } /// `E0449` Trait items always share the visibility of their trait -fn remove_items_visibility(item: &ast::AssocItem) { +fn remove_items_visibility(editor: &mut SyntaxEditor, item: &ast::AssocItem) { if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { if let Some(vis) = has_vis.visibility() && let Some(token) = vis.syntax().next_sibling_or_token() && token.kind() == SyntaxKind::WHITESPACE { - ted::remove(token); + editor.delete(token); + } + if let Some(vis) = has_vis.visibility() { + editor.delete(vis.syntax()); } - has_vis.set_visibility(None); } } -fn strip_body(item: &ast::AssocItem) { +fn strip_body(editor: &mut SyntaxEditor, item: &ast::AssocItem) { if let ast::AssocItem::Fn(f) = item && let Some(body) = f.body() { @@ -183,10 +184,10 @@ fn strip_body(item: &ast::AssocItem) { if let Some(prev) = body.syntax().prev_sibling_or_token() && prev.kind() == SyntaxKind::WHITESPACE { - ted::remove(prev); + editor.delete(prev); } - ted::replace(body.syntax(), make::tokens::semicolon()); + editor.replace(body.syntax(), make::tokens::semicolon()); }; } From df524163be45ba32cd019529771c41605a11abe7 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 1 Aug 2025 08:10:16 +1000 Subject: [PATCH 082/118] Mark `Printer` methods as unreachable where appropriate. This helps me understand the structure of the code a lot. If any of these are actually reachable, we can put the old code back, add a new test case, and we will have improved our test coverage. 
--- compiler/rustc_const_eval/src/util/type_name.rs | 2 +- compiler/rustc_lint/src/context.rs | 16 ++++++++-------- compiler/rustc_symbol_mangling/src/legacy.rs | 2 +- .../src/error_reporting/infer/mod.rs | 11 +++++++---- 4 files changed, 17 insertions(+), 14 deletions(-) diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index e8f2728a7728f..1a509048262e7 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -18,7 +18,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { - Ok(()) + unreachable!(); // because `::should_print_region` returns false } fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index b694d3dd49b79..70feb49d0dd14 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -756,22 +756,22 @@ impl<'tcx> LateContext<'tcx> { } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { - Ok(()) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn print_type(&mut self, _ty: Ty<'tcx>) -> Result<(), PrintError> { - Ok(()) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn print_dyn_existential( &mut self, _predicates: &'tcx ty::List>, ) -> Result<(), PrintError> { - Ok(()) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn print_const(&mut self, _ct: ty::Const<'tcx>) -> Result<(), PrintError> { - Ok(()) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { @@ -784,10 +784,10 @@ impl<'tcx> LateContext<'tcx> { self_ty: Ty<'tcx>, trait_ref: Option>, ) -> Result<(), PrintError> { - if trait_ref.is_none() { - if let ty::Adt(def, args) = self_ty.kind() { - return self.print_def_path(def.did(), args); - } + if trait_ref.is_none() + && let ty::Adt(def, args) = self_ty.kind() + { + return self.print_def_path(def.did(), args); } // This shouldn't ever be needed, but just in case: diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index 12d1de463136a..b75e793dfd3ad 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -236,7 +236,7 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { - Ok(()) + unreachable!(); // because `::should_print_region` returns false } fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index b9acadc406e9c..c158cce965736 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -235,28 +235,29 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { - Err(fmt::Error) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn print_type(&mut self, _ty: Ty<'tcx>) -> Result<(), PrintError> { - Err(fmt::Error) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn print_dyn_existential( 
&mut self, _predicates: &'tcx ty::List>, ) -> Result<(), PrintError> { - Err(fmt::Error) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn print_const(&mut self, _ct: ty::Const<'tcx>) -> Result<(), PrintError> { - Err(fmt::Error) + unreachable!(); // because `path_generic_args` ignores the `GenericArgs` } fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.segments = vec![self.tcx.crate_name(cnum)]; Ok(()) } + fn path_qualified( &mut self, _self_ty: Ty<'tcx>, @@ -274,6 +275,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ) -> Result<(), PrintError> { Err(fmt::Error) } + fn path_append( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, @@ -283,6 +285,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self.segments.push(disambiguated_data.as_sym(true)); Ok(()) } + fn path_generic_args( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, From bd0a308ca28d599d0dd798e925d1cb57bc96e616 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 1 Aug 2025 08:33:57 +1000 Subject: [PATCH 083/118] Inline and remove two `FmtPrinter` methods. They each have a single call site. --- compiler/rustc_middle/src/ty/print/pretty.rs | 46 +++++--------------- 1 file changed, 12 insertions(+), 34 deletions(-) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 71eac294f1554..38db784dd42eb 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2456,7 +2456,12 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { where T: Print<'tcx, Self> + TypeFoldable>, { - self.pretty_print_in_binder(value) + let old_region_index = self.region_index; + let (new_value, _) = self.name_all_regions(value, WrapBinderMode::ForAll)?; + new_value.print(self)?; + self.region_index = old_region_index; + self.binder_depth -= 1; + Ok(()) } fn wrap_binder Result<(), PrintError>>( @@ -2468,7 +2473,12 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { where T: TypeFoldable>, { - self.pretty_wrap_binder(value, mode, f) + let old_region_index = self.region_index; + let (new_value, _) = self.name_all_regions(value, mode)?; + f(&new_value, self)?; + self.region_index = old_region_index; + self.binder_depth -= 1; + Ok(()) } fn typed_value( @@ -2855,38 +2865,6 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { Ok((new_value, map)) } - pub fn pretty_print_in_binder( - &mut self, - value: &ty::Binder<'tcx, T>, - ) -> Result<(), fmt::Error> - where - T: Print<'tcx, Self> + TypeFoldable>, - { - let old_region_index = self.region_index; - let (new_value, _) = self.name_all_regions(value, WrapBinderMode::ForAll)?; - new_value.print(self)?; - self.region_index = old_region_index; - self.binder_depth -= 1; - Ok(()) - } - - pub fn pretty_wrap_binder Result<(), fmt::Error>>( - &mut self, - value: &ty::Binder<'tcx, T>, - mode: WrapBinderMode, - f: C, - ) -> Result<(), fmt::Error> - where - T: TypeFoldable>, - { - let old_region_index = self.region_index; - let (new_value, _) = self.name_all_regions(value, mode)?; - f(&new_value, self)?; - self.region_index = old_region_index; - self.binder_depth -= 1; - Ok(()) - } - fn prepare_region_info(&mut self, value: &ty::Binder<'tcx, T>) where T: TypeFoldable>, From e7d6a0776b06673b5a6258bd7476f1f39ab86756 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 1 Aug 2025 09:18:35 +1000 Subject: [PATCH 084/118] Remove `type_name::AbsolutePathPrinter::comma_sep`. 
It's equivalent to the default `PrettyPrinter::comma_sep`. --- compiler/rustc_const_eval/src/util/type_name.rs | 15 +-------------- compiler/rustc_symbol_mangling/src/legacy.rs | 2 ++ 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 1a509048262e7..2e8b9ea010e91 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -4,7 +4,7 @@ use rustc_data_structures::intern::Interned; use rustc_hir::def_id::CrateNum; use rustc_hir::definitions::DisambiguatedDefPathData; use rustc_middle::bug; -use rustc_middle::ty::print::{PrettyPrinter, Print, PrintError, Printer}; +use rustc_middle::ty::print::{PrettyPrinter, PrintError, Printer}; use rustc_middle::ty::{self, GenericArg, GenericArgKind, Ty, TyCtxt}; struct AbsolutePathPrinter<'tcx> { @@ -138,19 +138,6 @@ impl<'tcx> PrettyPrinter<'tcx> for AbsolutePathPrinter<'tcx> { fn should_print_region(&self, _region: ty::Region<'_>) -> bool { false } - fn comma_sep(&mut self, mut elems: impl Iterator) -> Result<(), PrintError> - where - T: Print<'tcx, Self>, - { - if let Some(first) = elems.next() { - first.print(self)?; - for elem in elems { - self.path.push_str(", "); - elem.print(self)?; - } - } - Ok(()) - } fn generic_delimiters( &mut self, diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index b75e793dfd3ad..a3bc650427b76 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -460,6 +460,8 @@ impl<'tcx> PrettyPrinter<'tcx> for SymbolPrinter<'tcx> { fn should_print_region(&self, _region: ty::Region<'_>) -> bool { false } + + // Identical to `PrettyPrinter::comma_sep` except there is no space after each comma. fn comma_sep(&mut self, mut elems: impl Iterator) -> Result<(), PrintError> where T: Print<'tcx, Self>, From 1698c8e322d0cd95aea94b85ef098e5ccfe3c856 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 1 Aug 2025 10:41:11 +1000 Subject: [PATCH 085/118] Rename `Printer` variables. Currently they are mostly named `cx`, which is a terrible name for a type that impls `Printer`/`PrettyPrinter`, and is easy to confuse with other types like `TyCtxt`. This commit changes them to `p`. A couple of existing `p` variables had to be renamed to make way. 
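A small sketch of the readability problem (toy types and names, not the real signatures): with both a type context and a printer in scope, `cx` is one keystroke away from `tcx`, while `p` cannot be misread. It also shows why an unrelated local that was already called `p` gets a more descriptive name.

```rust
// Toy stand-ins; the real code threads `TyCtxt` and `FmtPrinter` around instead.
struct TyCtxt;
struct FmtPrinter(String);

impl FmtPrinter {
    fn print_def_path(&mut self, _tcx: &TyCtxt, path: &str) {
        self.0.push_str(path);
    }
}

// Before this patch the printer below would have been named `cx`; now the
// printer is `p`, and any helper formerly named `p` is renamed to something
// descriptive to make way.
fn render_def_path(tcx: &TyCtxt, path: &str) -> String {
    let mut p = FmtPrinter(String::new());
    p.print_def_path(tcx, path);
    p.0
}

fn main() {
    println!("{}", render_def_path(&TyCtxt, "core::option::Option"));
}
```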
--- .../rustc_borrowck/src/diagnostics/mod.rs | 16 +- .../rustc_const_eval/src/interpret/operand.rs | 13 +- .../rustc_const_eval/src/util/type_name.rs | 6 +- compiler/rustc_lint/src/context.rs | 6 +- compiler/rustc_middle/src/mir/pretty.rs | 34 +-- compiler/rustc_middle/src/ty/error.rs | 24 +-- compiler/rustc_middle/src/ty/instance.rs | 6 +- compiler/rustc_middle/src/ty/print/mod.rs | 38 ++-- compiler/rustc_middle/src/ty/print/pretty.rs | 196 +++++++++--------- .../rustc_middle/src/ty/structural_impls.rs | 14 +- compiler/rustc_symbol_mangling/src/legacy.rs | 60 +++--- compiler/rustc_symbol_mangling/src/v0.rs | 74 +++---- .../src/error_reporting/infer/mod.rs | 4 +- .../error_reporting/infer/need_type_info.rs | 51 +++-- .../nice_region_error/placeholder_error.rs | 8 +- .../error_reporting/infer/note_and_explain.rs | 4 +- .../src/error_reporting/traits/overflow.rs | 6 +- 17 files changed, 279 insertions(+), 281 deletions(-) diff --git a/compiler/rustc_borrowck/src/diagnostics/mod.rs b/compiler/rustc_borrowck/src/diagnostics/mod.rs index 56fdaf1c724ac..34bed375cb965 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mod.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mod.rs @@ -613,7 +613,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { /// Return the name of the provided `Ty` (that must be a reference) with a synthesized lifetime /// name where required. pub(super) fn get_name_for_ty(&self, ty: Ty<'tcx>, counter: usize) -> String { - let mut printer = ty::print::FmtPrinter::new(self.infcx.tcx, Namespace::TypeNS); + let mut p = ty::print::FmtPrinter::new(self.infcx.tcx, Namespace::TypeNS); // We need to add synthesized lifetimes where appropriate. We do // this by hooking into the pretty printer and telling it to label the @@ -624,19 +624,19 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { | ty::RePlaceholder(ty::PlaceholderRegion { bound: ty::BoundRegion { kind: br, .. }, .. - }) => printer.region_highlight_mode.highlighting_bound_region(br, counter), + }) => p.region_highlight_mode.highlighting_bound_region(br, counter), _ => {} } } - ty.print(&mut printer).unwrap(); - printer.into_buffer() + ty.print(&mut p).unwrap(); + p.into_buffer() } /// Returns the name of the provided `Ty` (that must be a reference)'s region with a /// synthesized lifetime name where required. pub(super) fn get_region_name_for_ty(&self, ty: Ty<'tcx>, counter: usize) -> String { - let mut printer = ty::print::FmtPrinter::new(self.infcx.tcx, Namespace::TypeNS); + let mut p = ty::print::FmtPrinter::new(self.infcx.tcx, Namespace::TypeNS); let region = if let ty::Ref(region, ..) = ty.kind() { match region.kind() { @@ -644,7 +644,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { | ty::RePlaceholder(ty::PlaceholderRegion { bound: ty::BoundRegion { kind: br, .. }, .. 
- }) => printer.region_highlight_mode.highlighting_bound_region(br, counter), + }) => p.region_highlight_mode.highlighting_bound_region(br, counter), _ => {} } region @@ -652,8 +652,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { bug!("ty for annotation of borrow region is not a reference"); }; - region.print(&mut printer).unwrap(); - printer.into_buffer() + region.print(&mut p).unwrap(); + p.into_buffer() } /// Add a note to region errors and borrow explanations when higher-ranked regions in predicates diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 4171345790864..1454180907001 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -188,18 +188,18 @@ pub struct ImmTy<'tcx, Prov: Provenance = CtfeProvenance> { impl std::fmt::Display for ImmTy<'_, Prov> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { /// Helper function for printing a scalar to a FmtPrinter - fn p<'a, 'tcx, Prov: Provenance>( - cx: &mut FmtPrinter<'a, 'tcx>, + fn print_scalar<'a, 'tcx, Prov: Provenance>( + p: &mut FmtPrinter<'a, 'tcx>, s: Scalar, ty: Ty<'tcx>, ) -> Result<(), std::fmt::Error> { match s { - Scalar::Int(int) => cx.pretty_print_const_scalar_int(int, ty, true), + Scalar::Int(int) => p.pretty_print_const_scalar_int(int, ty, true), Scalar::Ptr(ptr, _sz) => { // Just print the ptr value. `pretty_print_const_scalar_ptr` would also try to // print what is points to, which would fail since it has no access to the local // memory. - cx.pretty_print_const_pointer(ptr, ty) + p.pretty_print_const_pointer(ptr, ty) } } } @@ -207,8 +207,9 @@ impl std::fmt::Display for ImmTy<'_, Prov> { match self.imm { Immediate::Scalar(s) => { if let Some(ty) = tcx.lift(self.layout.ty) { - let s = - FmtPrinter::print_string(tcx, Namespace::ValueNS, |cx| p(cx, s, ty))?; + let s = FmtPrinter::print_string(tcx, Namespace::ValueNS, |p| { + print_scalar(p, s, ty) + })?; f.write_str(&s)?; return Ok(()); } diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 2e8b9ea010e91..400ba23ae5f9b 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -166,7 +166,7 @@ impl Write for AbsolutePathPrinter<'_> { } pub fn type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> String { - let mut printer = AbsolutePathPrinter { tcx, path: String::new() }; - printer.print_type(ty).unwrap(); - printer.path + let mut p = AbsolutePathPrinter { tcx, path: String::new() }; + p.print_type(ty).unwrap(); + p.path } diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index 70feb49d0dd14..7e35d4d142bd8 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -854,9 +854,9 @@ impl<'tcx> LateContext<'tcx> { } } - let mut printer = AbsolutePathPrinter { tcx: self.tcx, path: vec![] }; - printer.print_def_path(def_id, &[]).unwrap(); - printer.path + let mut p = AbsolutePathPrinter { tcx: self.tcx, path: vec![] }; + p.print_def_path(def_id, &[]).unwrap(); + p.path } /// Returns the associated type `name` for `self_ty` as an implementation of `trait_id`. 
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 809cdb329f79e..f8a48005a7363 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -1197,8 +1197,8 @@ impl<'tcx> Debug for Rvalue<'tcx> { ty::tls::with(|tcx| { let variant_def = &tcx.adt_def(adt_did).variant(variant); let args = tcx.lift(args).expect("could not lift for printing"); - let name = FmtPrinter::print_string(tcx, Namespace::ValueNS, |cx| { - cx.print_def_path(variant_def.def_id, args) + let name = FmtPrinter::print_string(tcx, Namespace::ValueNS, |p| { + p.print_def_path(variant_def.def_id, args) })?; match variant_def.ctor_kind() { @@ -1473,9 +1473,9 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> { }; let fmt_valtree = |cv: &ty::Value<'tcx>| { - let mut cx = FmtPrinter::new(self.tcx, Namespace::ValueNS); - cx.pretty_print_const_valtree(*cv, /*print_ty*/ true).unwrap(); - cx.into_buffer() + let mut p = FmtPrinter::new(self.tcx, Namespace::ValueNS); + p.pretty_print_const_valtree(*cv, /*print_ty*/ true).unwrap(); + p.into_buffer() }; let val = match const_ { @@ -1967,10 +1967,10 @@ fn pretty_print_const_value_tcx<'tcx>( .expect("destructed mir constant of adt without variant idx"); let variant_def = &def.variant(variant_idx); let args = tcx.lift(args).unwrap(); - let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); - cx.print_alloc_ids = true; - cx.print_value_path(variant_def.def_id, args)?; - fmt.write_str(&cx.into_buffer())?; + let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); + p.print_alloc_ids = true; + p.print_value_path(variant_def.def_id, args)?; + fmt.write_str(&p.into_buffer())?; match variant_def.ctor_kind() { Some(CtorKind::Const) => {} @@ -2001,18 +2001,18 @@ fn pretty_print_const_value_tcx<'tcx>( } } (ConstValue::Scalar(scalar), _) => { - let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); - cx.print_alloc_ids = true; + let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); + p.print_alloc_ids = true; let ty = tcx.lift(ty).unwrap(); - cx.pretty_print_const_scalar(scalar, ty)?; - fmt.write_str(&cx.into_buffer())?; + p.pretty_print_const_scalar(scalar, ty)?; + fmt.write_str(&p.into_buffer())?; return Ok(()); } (ConstValue::ZeroSized, ty::FnDef(d, s)) => { - let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); - cx.print_alloc_ids = true; - cx.print_value_path(*d, s)?; - fmt.write_str(&cx.into_buffer())?; + let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); + p.print_alloc_ids = true; + p.print_value_path(*d, s)?; + fmt.write_str(&p.into_buffer())?; return Ok(()); } // FIXME(oli-obk): also pretty print arrays and other aggregate constants by reading diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs index 13723874ad3a1..c24dc983d2160 100644 --- a/compiler/rustc_middle/src/ty/error.rs +++ b/compiler/rustc_middle/src/ty/error.rs @@ -213,13 +213,13 @@ impl<'tcx> Ty<'tcx> { } impl<'tcx> TyCtxt<'tcx> { - pub fn string_with_limit(self, p: T, length_limit: usize) -> String + pub fn string_with_limit(self, t: T, length_limit: usize) -> String where T: Copy + for<'a, 'b> Lift, Lifted: Print<'b, FmtPrinter<'a, 'b>>>, { let mut type_limit = 50; - let regular = FmtPrinter::print_string(self, hir::def::Namespace::TypeNS, |cx| { - self.lift(p).expect("could not lift for printing").print(cx) + let regular = FmtPrinter::print_string(self, hir::def::Namespace::TypeNS, |p| { + self.lift(t).expect("could not lift for printing").print(p) }) .expect("could not write to `String`"); if 
regular.len() <= length_limit { @@ -229,16 +229,16 @@ impl<'tcx> TyCtxt<'tcx> { loop { // Look for the longest properly trimmed path that still fits in length_limit. short = with_forced_trimmed_paths!({ - let mut cx = FmtPrinter::new_with_limit( + let mut p = FmtPrinter::new_with_limit( self, hir::def::Namespace::TypeNS, rustc_session::Limit(type_limit), ); - self.lift(p) + self.lift(t) .expect("could not lift for printing") - .print(&mut cx) + .print(&mut p) .expect("could not print type"); - cx.into_buffer() + p.into_buffer() }); if short.len() <= length_limit || type_limit == 0 { break; @@ -252,12 +252,12 @@ impl<'tcx> TyCtxt<'tcx> { /// `tcx.short_string(ty, diag.long_ty_path())`. The diagnostic itself is the one that keeps /// the existence of a "long type" anywhere in the diagnostic, so the note telling the user /// where we wrote the file to is only printed once. - pub fn short_string(self, p: T, path: &mut Option) -> String + pub fn short_string(self, t: T, path: &mut Option) -> String where T: Copy + Hash + for<'a, 'b> Lift, Lifted: Print<'b, FmtPrinter<'a, 'b>>>, { - let regular = FmtPrinter::print_string(self, hir::def::Namespace::TypeNS, |cx| { - self.lift(p).expect("could not lift for printing").print(cx) + let regular = FmtPrinter::print_string(self, hir::def::Namespace::TypeNS, |p| { + self.lift(t).expect("could not lift for printing").print(p) }) .expect("could not write to `String`"); @@ -270,13 +270,13 @@ impl<'tcx> TyCtxt<'tcx> { if regular.len() <= width * 2 / 3 { return regular; } - let short = self.string_with_limit(p, length_limit); + let short = self.string_with_limit(t, length_limit); if regular == short { return regular; } // Ensure we create an unique file for the type passed in when we create a file. let mut s = DefaultHasher::new(); - p.hash(&mut s); + t.hash(&mut s); let hash = s.finish(); *path = Some(path.take().unwrap_or_else(|| { self.output_filenames(()).temp_path_for_diagnostic(&format!("long-type-{hash}.txt")) diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index eb35a95203261..16873b6ee21ad 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -397,13 +397,13 @@ pub fn fmt_instance( ty::tls::with(|tcx| { let args = tcx.lift(instance.args).expect("could not lift for printing"); - let mut cx = if let Some(type_length) = type_length { + let mut p = if let Some(type_length) = type_length { FmtPrinter::new_with_limit(tcx, Namespace::ValueNS, type_length) } else { FmtPrinter::new(tcx, Namespace::ValueNS) }; - cx.print_def_path(instance.def_id(), args)?; - let s = cx.into_buffer(); + p.print_def_path(instance.def_id(), args)?; + let s = p.into_buffer(); f.write_str(&s) })?; diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index 9172c5d3ab752..1fee9d945f65c 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -18,7 +18,7 @@ use super::Lift; pub type PrintError = std::fmt::Error; pub trait Print<'tcx, P> { - fn print(&self, cx: &mut P) -> Result<(), PrintError>; + fn print(&self, p: &mut P) -> Result<(), PrintError>; } /// Interface for outputting user-facing "type-system entities" @@ -148,7 +148,7 @@ pub trait Printer<'tcx>: Sized { && args.len() > parent_args.len() { return self.path_generic_args( - |cx| cx.print_def_path(def_id, parent_args), + |p| p.print_def_path(def_id, parent_args), &args[..parent_args.len() + 1][..1], ); } else { @@ -170,7 +170,7 @@ pub 
trait Printer<'tcx>: Sized { if !generics.is_own_empty() && args.len() >= generics.count() { let args = generics.own_args_no_defaults(self.tcx(), args); return self.path_generic_args( - |cx| cx.print_def_path(def_id, parent_args), + |p| p.print_def_path(def_id, parent_args), args, ); } @@ -186,16 +186,16 @@ pub trait Printer<'tcx>: Sized { } self.path_append( - |cx: &mut Self| { + |p: &mut Self| { if trait_qualify_parent { let trait_ref = ty::TraitRef::new( - cx.tcx(), + p.tcx(), parent_def_id, parent_args.iter().copied(), ); - cx.path_qualified(trait_ref.self_ty(), Some(trait_ref)) + p.path_qualified(trait_ref.self_ty(), Some(trait_ref)) } else { - cx.print_def_path(parent_def_id, parent_args) + p.print_def_path(parent_def_id, parent_args) } }, &key.disambiguated_data, @@ -237,7 +237,7 @@ pub trait Printer<'tcx>: Sized { // trait-type, then fallback to a format that identifies // the module more clearly. self.path_append_impl( - |cx| cx.print_def_path(parent_def_id, &[]), + |p| p.print_def_path(parent_def_id, &[]), &key.disambiguated_data, self_ty, impl_trait_ref, @@ -312,26 +312,26 @@ pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option { } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::Region<'tcx> { - fn print(&self, cx: &mut P) -> Result<(), PrintError> { - cx.print_region(*self) + fn print(&self, p: &mut P) -> Result<(), PrintError> { + p.print_region(*self) } } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for Ty<'tcx> { - fn print(&self, cx: &mut P) -> Result<(), PrintError> { - cx.print_type(*self) + fn print(&self, p: &mut P) -> Result<(), PrintError> { + p.print_type(*self) } } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for &'tcx ty::List> { - fn print(&self, cx: &mut P) -> Result<(), PrintError> { - cx.print_dyn_existential(self) + fn print(&self, p: &mut P) -> Result<(), PrintError> { + p.print_dyn_existential(self) } } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::Const<'tcx> { - fn print(&self, cx: &mut P) -> Result<(), PrintError> { - cx.print_const(*self) + fn print(&self, p: &mut P) -> Result<(), PrintError> { + p.print_const(*self) } } @@ -351,9 +351,9 @@ where { fn print(t: &T, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { ty::tls::with(|tcx| { - let mut cx = FmtPrinter::new(tcx, Namespace::TypeNS); - tcx.lift(*t).expect("could not lift for printing").print(&mut cx)?; - fmt.write_str(&cx.into_buffer())?; + let mut p = FmtPrinter::new(tcx, Namespace::TypeNS); + tcx.lift(*t).expect("could not lift for printing").print(&mut p)?; + fmt.write_str(&p.into_buffer())?; Ok(()) }) } diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 38db784dd42eb..2b5425e50275a 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -31,26 +31,26 @@ use crate::ty::{ macro_rules! p { (@$lit:literal) => { - write!(scoped_cx!(), $lit)? + write!(scoped_printer!(), $lit)? }; (@write($($data:expr),+)) => { - write!(scoped_cx!(), $($data),+)? + write!(scoped_printer!(), $($data),+)? }; (@print($x:expr)) => { - $x.print(scoped_cx!())? + $x.print(scoped_printer!())? }; (@$method:ident($($arg:expr),*)) => { - scoped_cx!().$method($($arg),*)? + scoped_printer!().$method($($arg),*)? }; ($($elem:tt $(($($args:tt)*))?),+) => {{ $(p!(@ $elem $(($($args)*))?);)+ }}; } -macro_rules! define_scoped_cx { - ($cx:ident) => { - macro_rules! scoped_cx { +macro_rules! define_scoped_printer { + ($p:ident) => { + macro_rules! 
scoped_printer { () => { - $cx + $p }; } }; @@ -689,8 +689,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } } - self.generic_delimiters(|cx| { - define_scoped_cx!(cx); + self.generic_delimiters(|p| { + define_scoped_printer!(p); p!(print(self_ty)); if let Some(trait_ref) = trait_ref { @@ -708,8 +708,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ) -> Result<(), PrintError> { print_prefix(self)?; - self.generic_delimiters(|cx| { - define_scoped_cx!(cx); + self.generic_delimiters(|p| { + define_scoped_printer!(p); p!("impl "); if let Some(trait_ref) = trait_ref { @@ -722,7 +722,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } fn pretty_print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); match *ty.kind() { ty::Bool => p!("bool"), @@ -769,8 +769,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } ty::FnPtr(ref sig_tys, hdr) => p!(print(sig_tys.with(hdr))), ty::UnsafeBinder(ref bound_ty) => { - self.wrap_binder(bound_ty, WrapBinderMode::Unsafe, |ty, cx| { - cx.pretty_print_type(*ty) + self.wrap_binder(bound_ty, WrapBinderMode::Unsafe, |ty, p| { + p.pretty_print_type(*ty) })?; } ty::Infer(infer_ty) => { @@ -1137,8 +1137,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { self.wrap_binder( &bound_args_and_self_ty, WrapBinderMode::ForAll, - |(args, _), cx| { - define_scoped_cx!(cx); + |(args, _), p| { + define_scoped_printer!(p); p!(write("{}", tcx.item_name(trait_def_id))); p!("("); @@ -1181,8 +1181,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { for (trait_pred, assoc_items) in traits { write!(self, "{}", if first { "" } else { " + " })?; - self.wrap_binder(&trait_pred, WrapBinderMode::ForAll, |trait_pred, cx| { - define_scoped_cx!(cx); + self.wrap_binder(&trait_pred, WrapBinderMode::ForAll, |trait_pred, p| { + define_scoped_printer!(p); if trait_pred.polarity == ty::PredicatePolarity::Negative { p!("!"); @@ -1322,9 +1322,9 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ) -> Result<(), PrintError> { let def_key = self.tcx().def_key(alias_ty.def_id); self.path_generic_args( - |cx| { - cx.path_append( - |cx| cx.path_qualified(alias_ty.self_ty(), None), + |p| { + p.path_append( + |p| p.path_qualified(alias_ty.self_ty(), None), &def_key.disambiguated_data, ) }, @@ -1388,15 +1388,15 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let mut first = true; if let Some(bound_principal) = predicates.principal() { - self.wrap_binder(&bound_principal, WrapBinderMode::ForAll, |principal, cx| { - define_scoped_cx!(cx); + self.wrap_binder(&bound_principal, WrapBinderMode::ForAll, |principal, p| { + define_scoped_printer!(p); p!(print_def_path(principal.def_id, &[])); let mut resugared = false; // Special-case `Fn(...) -> ...` and re-sugar it. - let fn_trait_kind = cx.tcx().fn_trait_kind_from_def_id(principal.def_id); - if !cx.should_print_verbose() && fn_trait_kind.is_some() { + let fn_trait_kind = p.tcx().fn_trait_kind_from_def_id(principal.def_id); + if !p.should_print_verbose() && fn_trait_kind.is_some() { if let ty::Tuple(tys) = principal.args.type_at(0).kind() { let mut projections = predicates.projection_bounds(); if let (Some(proj), None) = (projections.next(), projections.next()) { @@ -1414,18 +1414,18 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // in order to place the projections inside the `<...>`. 
if !resugared { let principal_with_self = - principal.with_self_ty(cx.tcx(), cx.tcx().types.trait_object_dummy_self); + principal.with_self_ty(p.tcx(), p.tcx().types.trait_object_dummy_self); - let args = cx + let args = p .tcx() .generics_of(principal_with_self.def_id) - .own_args_no_defaults(cx.tcx(), principal_with_self.args); + .own_args_no_defaults(p.tcx(), principal_with_self.args); let bound_principal_with_self = bound_principal - .with_self_ty(cx.tcx(), cx.tcx().types.trait_object_dummy_self); + .with_self_ty(p.tcx(), p.tcx().types.trait_object_dummy_self); - let clause: ty::Clause<'tcx> = bound_principal_with_self.upcast(cx.tcx()); - let super_projections: Vec<_> = elaborate::elaborate(cx.tcx(), [clause]) + let clause: ty::Clause<'tcx> = bound_principal_with_self.upcast(p.tcx()); + let super_projections: Vec<_> = elaborate::elaborate(p.tcx(), [clause]) .filter_only_self() .filter_map(|clause| clause.as_projection_clause()) .collect(); @@ -1436,15 +1436,15 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // Filter out projections that are implied by the super predicates. let proj_is_implied = super_projections.iter().any(|&super_proj| { let super_proj = super_proj.map_bound(|super_proj| { - ty::ExistentialProjection::erase_self_ty(cx.tcx(), super_proj) + ty::ExistentialProjection::erase_self_ty(p.tcx(), super_proj) }); // This function is sometimes called on types with erased and // anonymized regions, but the super projections can still // contain named regions. So we erase and anonymize everything // here to compare the types modulo regions below. - let proj = cx.tcx().erase_regions(proj); - let super_proj = cx.tcx().erase_regions(super_proj); + let proj = p.tcx().erase_regions(proj); + let super_proj = p.tcx().erase_regions(super_proj); proj == super_proj }); @@ -1458,15 +1458,15 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { .collect(); projections - .sort_by_cached_key(|proj| cx.tcx().item_name(proj.def_id).to_string()); + .sort_by_cached_key(|proj| p.tcx().item_name(proj.def_id).to_string()); if !args.is_empty() || !projections.is_empty() { - p!(generic_delimiters(|cx| { - cx.comma_sep(args.iter().copied())?; + p!(generic_delimiters(|p| { + p.comma_sep(args.iter().copied())?; if !args.is_empty() && !projections.is_empty() { - write!(cx, ", ")?; + write!(p, ", ")?; } - cx.comma_sep(projections.iter().copied()) + p.comma_sep(projections.iter().copied()) })); } } @@ -1476,11 +1476,11 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { first = false; } - define_scoped_cx!(self); + define_scoped_printer!(self); // Builtin bounds. // FIXME(eddyb) avoid printing twice (needed to ensure - // that the auto traits are sorted *and* printed via cx). + // that the auto traits are sorted *and* printed via p). let mut auto_traits: Vec<_> = predicates.auto_traits().collect(); // The auto traits come ordered by `DefPathHash`. 
While @@ -1510,7 +1510,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { c_variadic: bool, output: Ty<'tcx>, ) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); p!("(", comma_sep(inputs.iter().copied())); if c_variadic { @@ -1532,7 +1532,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ct: ty::Const<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); if self.should_print_verbose() { p!(write("{:?}", ct)); @@ -1595,7 +1595,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { expr: Expr<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); match expr.kind { ty::ExprKind::Binop(op) => { let (_, _, c1, c2) = expr.binop_args(); @@ -1718,7 +1718,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ptr: Pointer, ty: Ty<'tcx>, ) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); let (prov, offset) = ptr.prov_and_relative_offset(); match ty.kind() { @@ -1778,7 +1778,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ty: Ty<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); match ty.kind() { // Bool @@ -1876,7 +1876,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { cv: ty::Value<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); if with_reduced_queries() || self.should_print_verbose() { p!(write("ValTree({:?}: ", cv.valtree), print(cv.ty), ")"); @@ -2012,8 +2012,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let kind = closure.kind_ty().to_opt_closure_kind().unwrap_or(ty::ClosureKind::Fn); write!(self, "impl ")?; - self.wrap_binder(&sig, WrapBinderMode::ForAll, |sig, cx| { - define_scoped_cx!(cx); + self.wrap_binder(&sig, WrapBinderMode::ForAll, |sig, p| { + define_scoped_printer!(p); p!(write("{kind}(")); for (i, arg) in sig.inputs()[0].tuple_fields().iter().enumerate() { @@ -2036,7 +2036,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { &mut self, constness: ty::BoundConstness, ) -> Result<(), PrintError> { - define_scoped_cx!(self); + define_scoped_printer!(self); match constness { ty::BoundConstness::Const => { @@ -2061,10 +2061,10 @@ pub(crate) fn pretty_print_const<'tcx>( ) -> fmt::Result { ty::tls::with(|tcx| { let literal = tcx.lift(c).unwrap(); - let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); - cx.print_alloc_ids = true; - cx.pretty_print_const(literal, print_types)?; - fmt.write_str(&cx.into_buffer())?; + let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); + p.print_alloc_ids = true; + p.pretty_print_const(literal, print_types)?; + fmt.write_str(&p.into_buffer())?; Ok(()) }) } @@ -2184,7 +2184,7 @@ impl<'t> TyCtxt<'t> { let ns = guess_def_namespace(self, def_id); debug!("def_path_str: def_id={:?}, ns={:?}", def_id, ns); - FmtPrinter::print_string(self, ns, |cx| cx.print_def_path(def_id, args)).unwrap() + FmtPrinter::print_string(self, ns, |p| p.print_def_path(def_id, args)).unwrap() } pub fn value_path_str_with_args( @@ -2196,7 +2196,7 @@ impl<'t> TyCtxt<'t> { let ns = guess_def_namespace(self, def_id); debug!("value_path_str: def_id={:?}, ns={:?}", def_id, ns); - FmtPrinter::print_string(self, ns, |cx| cx.print_value_path(def_id, args)).unwrap() + FmtPrinter::print_string(self, ns, |p| p.print_value_path(def_id, args)).unwrap() } } @@ -2363,10 +2363,10 @@ 
impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { trait_ref: Option>, ) -> Result<(), PrintError> { self.pretty_path_append_impl( - |cx| { - print_prefix(cx)?; - if !cx.empty_path { - write!(cx, "::")?; + |p| { + print_prefix(p)?; + if !p.empty_path { + write!(p, "::")?; } Ok(()) @@ -2420,7 +2420,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { if self.in_value { write!(self, "::")?; } - self.generic_delimiters(|cx| cx.comma_sep(args.iter().copied())) + self.generic_delimiters(|p| p.comma_sep(args.iter().copied())) } else { Ok(()) } @@ -2562,7 +2562,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { ty: Ty<'tcx>, ) -> Result<(), PrintError> { let print = |this: &mut Self| { - define_scoped_cx!(this); + define_scoped_printer!(this); if this.print_alloc_ids { p!(write("{:?}", p)); } else { @@ -2577,7 +2577,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { // HACK(eddyb) limited to `FmtPrinter` because of `region_highlight_mode`. impl<'tcx> FmtPrinter<'_, 'tcx> { pub fn pretty_print_region(&mut self, region: ty::Region<'tcx>) -> Result<(), fmt::Error> { - define_scoped_cx!(self); + define_scoped_printer!(self); // Watch out for region highlights. let highlight = self.region_highlight_mode; @@ -2755,17 +2755,17 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { debug!("self.used_region_names: {:?}", self.used_region_names); let mut empty = true; - let mut start_or_continue = |cx: &mut Self, start: &str, cont: &str| { + let mut start_or_continue = |p: &mut Self, start: &str, cont: &str| { let w = if empty { empty = false; start } else { cont }; - let _ = write!(cx, "{w}"); + let _ = write!(p, "{w}"); }; - let do_continue = |cx: &mut Self, cont: Symbol| { - let _ = write!(cx, "{cont}"); + let do_continue = |p: &mut Self, cont: Symbol| { + let _ = write!(p, "{cont}"); }; let possible_names = ('a'..='z').rev().map(|s| Symbol::intern(&format!("'{s}"))); @@ -2918,8 +2918,8 @@ impl<'tcx, T, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::Binder<'tcx, T> where T: Print<'tcx, P> + TypeFoldable>, { - fn print(&self, cx: &mut P) -> Result<(), PrintError> { - cx.print_in_binder(self) + fn print(&self, p: &mut P) -> Result<(), PrintError> { + p.print_in_binder(self) } } @@ -2927,8 +2927,8 @@ impl<'tcx, T, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::OutlivesPredicate<' where T: Print<'tcx, P>, { - fn print(&self, cx: &mut P) -> Result<(), PrintError> { - define_scoped_cx!(cx); + fn print(&self, p: &mut P) -> Result<(), PrintError> { + define_scoped_printer!(p); p!(print(self.0), ": ", print(self.1)); Ok(()) } @@ -3068,11 +3068,11 @@ macro_rules! forward_display_to_print { $(#[allow(unused_lifetimes)] impl<'tcx> fmt::Display for $ty { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ty::tls::with(|tcx| { - let mut cx = FmtPrinter::new(tcx, Namespace::TypeNS); + let mut p = FmtPrinter::new(tcx, Namespace::TypeNS); tcx.lift(*self) .expect("could not lift for printing") - .print(&mut cx)?; - f.write_str(&cx.into_buffer())?; + .print(&mut p)?; + f.write_str(&p.into_buffer())?; Ok(()) }) } @@ -3081,10 +3081,10 @@ macro_rules! forward_display_to_print { } macro_rules! 
define_print { - (($self:ident, $cx:ident): $($ty:ty $print:block)+) => { + (($self:ident, $p:ident): $($ty:ty $print:block)+) => { $(impl<'tcx, P: PrettyPrinter<'tcx>> Print<'tcx, P> for $ty { - fn print(&$self, $cx: &mut P) -> Result<(), PrintError> { - define_scoped_cx!($cx); + fn print(&$self, $p: &mut P) -> Result<(), PrintError> { + define_scoped_printer!($p); let _: () = $print; Ok(()) } @@ -3093,8 +3093,8 @@ macro_rules! define_print { } macro_rules! define_print_and_forward_display { - (($self:ident, $cx:ident): $($ty:ty $print:block)+) => { - define_print!(($self, $cx): $($ty $print)*); + (($self:ident, $p:ident): $($ty:ty $print:block)+) => { + define_print!(($self, $p): $($ty $print)*); forward_display_to_print!($($ty),+); }; } @@ -3107,7 +3107,7 @@ forward_display_to_print! { } define_print! { - (self, cx): + (self, p): ty::FnSig<'tcx> { p!(write("{}", self.safety.prefix_str())); @@ -3129,11 +3129,11 @@ define_print! { } ty::AliasTerm<'tcx> { - match self.kind(cx.tcx()) { + match self.kind(p.tcx()) { ty::AliasTermKind::InherentTy | ty::AliasTermKind::InherentConst => p!(pretty_print_inherent_projection(*self)), ty::AliasTermKind::ProjectionTy => { - if !(cx.should_print_verbose() || with_reduced_queries()) - && cx.tcx().is_impl_trait_in_trait(self.def_id) + if !(p.should_print_verbose() || with_reduced_queries()) + && p.tcx().is_impl_trait_in_trait(self.def_id) { p!(pretty_print_rpitit(self.def_id, self.args)) } else { @@ -3222,46 +3222,46 @@ define_print! { ty::ExistentialTraitRef<'tcx> { // Use a type that can't appear in defaults of type parameters. - let dummy_self = Ty::new_fresh(cx.tcx(), 0); - let trait_ref = self.with_self_ty(cx.tcx(), dummy_self); + let dummy_self = Ty::new_fresh(p.tcx(), 0); + let trait_ref = self.with_self_ty(p.tcx(), dummy_self); p!(print(trait_ref.print_only_trait_path())) } ty::ExistentialProjection<'tcx> { - let name = cx.tcx().associated_item(self.def_id).name(); + let name = p.tcx().associated_item(self.def_id).name(); // The args don't contain the self ty (as it has been erased) but the corresp. // generics do as the trait always has a self ty param. We need to offset. - let args = &self.args[cx.tcx().generics_of(self.def_id).parent_count - 1..]; - p!(path_generic_args(|cx| write!(cx, "{name}"), args), " = ", print(self.term)) + let args = &self.args[p.tcx().generics_of(self.def_id).parent_count - 1..]; + p!(path_generic_args(|p| write!(p, "{name}"), args), " = ", print(self.term)) } ty::ProjectionPredicate<'tcx> { p!(print(self.projection_term), " == "); - cx.reset_type_limit(); + p.reset_type_limit(); p!(print(self.term)) } ty::SubtypePredicate<'tcx> { p!(print(self.a), " <: "); - cx.reset_type_limit(); + p.reset_type_limit(); p!(print(self.b)) } ty::CoercePredicate<'tcx> { p!(print(self.a), " -> "); - cx.reset_type_limit(); + p.reset_type_limit(); p!(print(self.b)) } ty::NormalizesTo<'tcx> { p!(print(self.alias), " normalizes-to "); - cx.reset_type_limit(); + p.reset_type_limit(); p!(print(self.term)) } } define_print_and_forward_display! { - (self, cx): + (self, p): &'tcx ty::List> { p!("{{", comma_sep(self.iter()), "}}") @@ -3273,10 +3273,10 @@ define_print_and_forward_display! 
{ TraitRefPrintSugared<'tcx> { if !with_reduced_queries() - && cx.tcx().trait_def(self.0.def_id).paren_sugar + && p.tcx().trait_def(self.0.def_id).paren_sugar && let ty::Tuple(args) = self.0.args.type_at(1).kind() { - p!(write("{}", cx.tcx().item_name(self.0.def_id)), "("); + p!(write("{}", p.tcx().item_name(self.0.def_id)), "("); for (i, arg) in args.iter().enumerate() { if i > 0 { p!(", "); @@ -3322,9 +3322,9 @@ define_print_and_forward_display! { ty::PlaceholderType { match self.bound.kind { ty::BoundTyKind::Anon => p!(write("{self:?}")), - ty::BoundTyKind::Param(def_id) => match cx.should_print_verbose() { + ty::BoundTyKind::Param(def_id) => match p.should_print_verbose() { true => p!(write("{self:?}")), - false => p!(write("{}", cx.tcx().item_name(def_id))), + false => p!(write("{}", p.tcx().item_name(def_id))), }, } } diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 10e499d9c758c..0e2aff6f9bdac 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -25,8 +25,8 @@ impl fmt::Debug for ty::TraitDef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ty::tls::with(|tcx| { with_no_trimmed_paths!({ - let s = FmtPrinter::print_string(tcx, Namespace::TypeNS, |cx| { - cx.print_def_path(self.def_id, &[]) + let s = FmtPrinter::print_string(tcx, Namespace::TypeNS, |p| { + p.print_def_path(self.def_id, &[]) })?; f.write_str(&s) }) @@ -38,8 +38,8 @@ impl<'tcx> fmt::Debug for ty::AdtDef<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ty::tls::with(|tcx| { with_no_trimmed_paths!({ - let s = FmtPrinter::print_string(tcx, Namespace::TypeNS, |cx| { - cx.print_def_path(self.did(), &[]) + let s = FmtPrinter::print_string(tcx, Namespace::TypeNS, |p| { + p.print_def_path(self.did(), &[]) })?; f.write_str(&s) }) @@ -170,9 +170,9 @@ impl<'tcx> fmt::Debug for ty::Const<'tcx> { if let ConstKind::Value(cv) = self.kind() { return ty::tls::with(move |tcx| { let cv = tcx.lift(cv).unwrap(); - let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); - cx.pretty_print_const_valtree(cv, /*print_ty*/ true)?; - f.write_str(&cx.into_buffer()) + let mut p = FmtPrinter::new(tcx, Namespace::ValueNS); + p.pretty_print_const_valtree(cv, /*print_ty*/ true)?; + f.write_str(&p.into_buffer()) }); } // Fall back to something verbose. 
diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index a3bc650427b76..d1834abb32b07 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -58,59 +58,57 @@ pub(super) fn mangle<'tcx>( let hash = get_symbol_hash(tcx, instance, instance_ty, instantiating_crate); - let mut printer = SymbolPrinter { tcx, path: SymbolPath::new(), keep_within_component: false }; - printer - .print_def_path( - def_id, - if let ty::InstanceKind::DropGlue(_, _) - | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) - | ty::InstanceKind::FutureDropPollShim(_, _, _) = instance.def - { - // Add the name of the dropped type to the symbol name - &*instance.args - } else if let ty::InstanceKind::AsyncDropGlue(_, ty) = instance.def { - let ty::Coroutine(_, cor_args) = ty.kind() else { - bug!(); - }; - let drop_ty = cor_args.first().unwrap().expect_ty(); - tcx.mk_args(&[GenericArg::from(drop_ty)]) - } else { - &[] - }, - ) - .unwrap(); + let mut p = SymbolPrinter { tcx, path: SymbolPath::new(), keep_within_component: false }; + p.print_def_path( + def_id, + if let ty::InstanceKind::DropGlue(_, _) + | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) + | ty::InstanceKind::FutureDropPollShim(_, _, _) = instance.def + { + // Add the name of the dropped type to the symbol name + &*instance.args + } else if let ty::InstanceKind::AsyncDropGlue(_, ty) = instance.def { + let ty::Coroutine(_, cor_args) = ty.kind() else { + bug!(); + }; + let drop_ty = cor_args.first().unwrap().expect_ty(); + tcx.mk_args(&[GenericArg::from(drop_ty)]) + } else { + &[] + }, + ) + .unwrap(); match instance.def { ty::InstanceKind::ThreadLocalShim(..) => { - printer.write_str("{{tls-shim}}").unwrap(); + p.write_str("{{tls-shim}}").unwrap(); } ty::InstanceKind::VTableShim(..) => { - printer.write_str("{{vtable-shim}}").unwrap(); + p.write_str("{{vtable-shim}}").unwrap(); } ty::InstanceKind::ReifyShim(_, reason) => { - printer.write_str("{{reify-shim").unwrap(); + p.write_str("{{reify-shim").unwrap(); match reason { - Some(ReifyReason::FnPtr) => printer.write_str("-fnptr").unwrap(), - Some(ReifyReason::Vtable) => printer.write_str("-vtable").unwrap(), + Some(ReifyReason::FnPtr) => p.write_str("-fnptr").unwrap(), + Some(ReifyReason::Vtable) => p.write_str("-vtable").unwrap(), None => (), } - printer.write_str("}}").unwrap(); + p.write_str("}}").unwrap(); } // FIXME(async_closures): This shouldn't be needed when we fix // `Instance::ty`/`Instance::def_id`. ty::InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } => { - printer - .write_str(if receiver_by_ref { "{{by-move-shim}}" } else { "{{by-ref-shim}}" }) + p.write_str(if receiver_by_ref { "{{by-move-shim}}" } else { "{{by-ref-shim}}" }) .unwrap(); } _ => {} } if let ty::InstanceKind::FutureDropPollShim(..) 
= instance.def { - let _ = printer.write_str("{{drop-shim}}"); + let _ = p.write_str("{{drop-shim}}"); } - printer.path.finish(hash) + p.path.finish(hash) } fn get_symbol_hash<'tcx>( diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index fe0f8e6113ef7..ce1eb1a164860 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -33,7 +33,7 @@ pub(super) fn mangle<'tcx>( let args = tcx.normalize_erasing_regions(ty::TypingEnv::fully_monomorphized(), instance.args); let prefix = "_R"; - let mut cx: SymbolMangler<'_> = SymbolMangler { + let mut p: SymbolMangler<'_> = SymbolMangler { tcx, start_offset: prefix.len(), is_exportable, @@ -69,16 +69,16 @@ pub(super) fn mangle<'tcx>( bug!(); }; let drop_ty = cor_args.first().unwrap().expect_ty(); - cx.print_def_path(def_id, tcx.mk_args(&[GenericArg::from(drop_ty)])).unwrap() + p.print_def_path(def_id, tcx.mk_args(&[GenericArg::from(drop_ty)])).unwrap() } else if let Some(shim_kind) = shim_kind { - cx.path_append_ns(|cx| cx.print_def_path(def_id, args), 'S', 0, shim_kind).unwrap() + p.path_append_ns(|p| p.print_def_path(def_id, args), 'S', 0, shim_kind).unwrap() } else { - cx.print_def_path(def_id, args).unwrap() + p.print_def_path(def_id, args).unwrap() }; if let Some(instantiating_crate) = instantiating_crate { - cx.print_def_path(instantiating_crate.as_def_id(), &[]).unwrap(); + p.print_def_path(instantiating_crate.as_def_id(), &[]).unwrap(); } - std::mem::take(&mut cx.out) + std::mem::take(&mut p.out) } pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> String { @@ -88,7 +88,7 @@ pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> Strin } let prefix = "_R"; - let mut cx: SymbolMangler<'_> = SymbolMangler { + let mut p: SymbolMangler<'_> = SymbolMangler { tcx, start_offset: prefix.len(), is_exportable: false, @@ -99,10 +99,10 @@ pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> Strin out: String::from(prefix), }; - cx.path_append_ns( - |cx| { - cx.push("C"); - cx.push_disambiguator({ + p.path_append_ns( + |p| { + p.push("C"); + p.push_disambiguator({ let mut hasher = StableHasher::new(); // Incorporate the rustc version to ensure #[rustc_std_internal_symbol] functions // get a different symbol name depending on the rustc version. @@ -114,7 +114,7 @@ pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> Strin let hash: Hash64 = hasher.finish(); hash.as_u64() }); - cx.push_ident("__rustc"); + p.push_ident("__rustc"); Ok(()) }, 'v', @@ -123,7 +123,7 @@ pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> Strin ) .unwrap(); - std::mem::take(&mut cx.out) + std::mem::take(&mut p.out) } pub(super) fn mangle_typeid_for_trait_ref<'tcx>( @@ -131,7 +131,7 @@ pub(super) fn mangle_typeid_for_trait_ref<'tcx>( trait_ref: ty::ExistentialTraitRef<'tcx>, ) -> String { // FIXME(flip1995): See comment in `mangle_typeid_for_fnabi`. 
- let mut cx = SymbolMangler { + let mut p = SymbolMangler { tcx, start_offset: 0, is_exportable: false, @@ -141,8 +141,8 @@ pub(super) fn mangle_typeid_for_trait_ref<'tcx>( binders: vec![], out: String::new(), }; - cx.print_def_path(trait_ref.def_id, &[]).unwrap(); - std::mem::take(&mut cx.out) + p.print_def_path(trait_ref.def_id, &[]).unwrap(); + std::mem::take(&mut p.out) } struct BinderLevel { @@ -368,7 +368,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { self.path_generic_args( |this| { this.path_append_ns( - |cx| cx.print_def_path(parent_def_id, &[]), + |p| p.print_def_path(parent_def_id, &[]), 'I', key.disambiguated_data.disambiguator as u64, "", @@ -542,31 +542,31 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { ty::FnPtr(sig_tys, hdr) => { let sig = sig_tys.with(hdr); self.push("F"); - self.wrap_binder(&sig, |cx, sig| { + self.wrap_binder(&sig, |p, sig| { if sig.safety.is_unsafe() { - cx.push("U"); + p.push("U"); } match sig.abi { ExternAbi::Rust => {} - ExternAbi::C { unwind: false } => cx.push("KC"), + ExternAbi::C { unwind: false } => p.push("KC"), abi => { - cx.push("K"); + p.push("K"); let name = abi.as_str(); if name.contains('-') { - cx.push_ident(&name.replace('-', "_")); + p.push_ident(&name.replace('-', "_")); } else { - cx.push_ident(name); + p.push_ident(name); } } } for &ty in sig.inputs() { - ty.print(cx)?; + ty.print(p)?; } if sig.c_variadic { - cx.push("v"); + p.push("v"); } - cx.push("E"); - sig.output().print(cx) + p.push("E"); + sig.output().print(p) })?; } @@ -623,7 +623,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { // [ [{}]] [{}] // Since any predicates after the first one shouldn't change the binders, // just put them all in the binders of the first. - self.wrap_binder(&predicates[0], |cx, _| { + self.wrap_binder(&predicates[0], |p, _| { for predicate in predicates.iter() { // It would be nice to be able to validate bound vars here, but // projections can actually include bound vars from super traits @@ -632,21 +632,21 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { match predicate.as_ref().skip_binder() { ty::ExistentialPredicate::Trait(trait_ref) => { // Use a type that can't appear in defaults of type parameters. 
- let dummy_self = Ty::new_fresh(cx.tcx, 0); - let trait_ref = trait_ref.with_self_ty(cx.tcx, dummy_self); - cx.print_def_path(trait_ref.def_id, trait_ref.args)?; + let dummy_self = Ty::new_fresh(p.tcx, 0); + let trait_ref = trait_ref.with_self_ty(p.tcx, dummy_self); + p.print_def_path(trait_ref.def_id, trait_ref.args)?; } ty::ExistentialPredicate::Projection(projection) => { - let name = cx.tcx.associated_item(projection.def_id).name(); - cx.push("p"); - cx.push_ident(name.as_str()); + let name = p.tcx.associated_item(projection.def_id).name(); + p.push("p"); + p.push_ident(name.as_str()); match projection.term.kind() { - ty::TermKind::Ty(ty) => ty.print(cx), - ty::TermKind::Const(c) => c.print(cx), + ty::TermKind::Ty(ty) => ty.print(p), + ty::TermKind::Const(c) => c.print(p), }?; } ty::ExistentialPredicate::AutoTrait(def_id) => { - cx.print_def_path(*def_id, &[])?; + p.print_def_path(*def_id, &[])?; } } } diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index c158cce965736..8daa5d5347a0e 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -301,8 +301,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // let _ = [{struct Foo; Foo}, {struct Foo; Foo}]; if did1.krate != did2.krate { let abs_path = |def_id| { - let mut printer = AbsolutePathPrinter { tcx: self.tcx, segments: vec![] }; - printer.print_def_path(def_id, &[]).map(|_| printer.segments) + let mut p = AbsolutePathPrinter { tcx: self.tcx, segments: vec![] }; + p.print_def_path(def_id, &[]).map(|_| p.segments) }; // We compare strings because DefPath can be different diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs index 022d549a9df80..966f117a1bf91 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs @@ -244,7 +244,7 @@ impl<'a, 'tcx> TypeFolder> for ClosureEraser<'a, 'tcx> { } fn fmt_printer<'a, 'tcx>(infcx: &'a InferCtxt<'tcx>, ns: Namespace) -> FmtPrinter<'a, 'tcx> { - let mut printer = FmtPrinter::new(infcx.tcx, ns); + let mut p = FmtPrinter::new(infcx.tcx, ns); let ty_getter = move |ty_vid| { if infcx.probe_ty_var(ty_vid).is_ok() { warn!("resolved ty var in error message"); @@ -270,11 +270,11 @@ fn fmt_printer<'a, 'tcx>(infcx: &'a InferCtxt<'tcx>, ns: Namespace) -> FmtPrinte None } }; - printer.ty_infer_name_resolver = Some(Box::new(ty_getter)); + p.ty_infer_name_resolver = Some(Box::new(ty_getter)); let const_getter = move |ct_vid| Some(infcx.tcx.item_name(infcx.const_var_origin(ct_vid)?.param_def_id?)); - printer.const_infer_name_resolver = Some(Box::new(const_getter)); - printer + p.const_infer_name_resolver = Some(Box::new(const_getter)); + p } fn ty_to_string<'tcx>( @@ -282,7 +282,7 @@ fn ty_to_string<'tcx>( ty: Ty<'tcx>, called_method_def_id: Option, ) -> String { - let mut printer = fmt_printer(infcx, Namespace::TypeNS); + let mut p = fmt_printer(infcx, Namespace::TypeNS); let ty = infcx.resolve_vars_if_possible(ty); // We use `fn` ptr syntax for closures, but this only works when the closure does not capture // anything. We also remove all type parameters that are fully known to the type system. 
@@ -292,8 +292,8 @@ fn ty_to_string<'tcx>( // We don't want the regular output for `fn`s because it includes its path in // invalid pseudo-syntax, we want the `fn`-pointer output instead. (ty::FnDef(..), _) => { - ty.fn_sig(infcx.tcx).print(&mut printer).unwrap(); - printer.into_buffer() + ty.fn_sig(infcx.tcx).print(&mut p).unwrap(); + p.into_buffer() } (_, Some(def_id)) if ty.is_ty_or_numeric_infer() @@ -303,8 +303,8 @@ fn ty_to_string<'tcx>( } _ if ty.is_ty_or_numeric_infer() => "/* Type */".to_string(), _ => { - ty.print(&mut printer).unwrap(); - printer.into_buffer() + ty.print(&mut p).unwrap(); + p.into_buffer() } } } @@ -561,21 +561,20 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { { "Vec<_>".to_string() } else { - let mut printer = fmt_printer(self, Namespace::TypeNS); - printer - .comma_sep(generic_args.iter().copied().map(|arg| { - if arg.is_suggestable(self.tcx, true) { - return arg; - } + let mut p = fmt_printer(self, Namespace::TypeNS); + p.comma_sep(generic_args.iter().copied().map(|arg| { + if arg.is_suggestable(self.tcx, true) { + return arg; + } - match arg.kind() { - GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"), - GenericArgKind::Type(_) => self.next_ty_var(DUMMY_SP).into(), - GenericArgKind::Const(_) => self.next_const_var(DUMMY_SP).into(), - } - })) - .unwrap(); - printer.into_buffer() + match arg.kind() { + GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"), + GenericArgKind::Type(_) => self.next_ty_var(DUMMY_SP).into(), + GenericArgKind::Const(_) => self.next_const_var(DUMMY_SP).into(), + } + })) + .unwrap(); + p.into_buffer() }; if !have_turbofish { @@ -589,9 +588,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { InferSourceKind::FullyQualifiedMethodCall { receiver, successor, args, def_id } => { let placeholder = Some(self.next_ty_var(DUMMY_SP)); if let Some(args) = args.make_suggestable(self.infcx.tcx, true, placeholder) { - let mut printer = fmt_printer(self, Namespace::ValueNS); - printer.print_def_path(def_id, args).unwrap(); - let def_path = printer.into_buffer(); + let mut p = fmt_printer(self, Namespace::ValueNS); + p.print_def_path(def_id, args).unwrap(); + let def_path = p.into_buffer(); // We only care about whether we have to add `&` or `&mut ` for now. 
// This is the case if the last adjustment is a borrow and the diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/placeholder_error.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/placeholder_error.rs index 64fc365c44a65..373b756dcdb71 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/placeholder_error.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/placeholder_error.rs @@ -47,11 +47,11 @@ where T: for<'a> Print<'tcx, FmtPrinter<'a, 'tcx>>, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut printer = ty::print::FmtPrinter::new(self.tcx, self.ns); - printer.region_highlight_mode = self.highlight; + let mut p = ty::print::FmtPrinter::new(self.tcx, self.ns); + p.region_highlight_mode = self.highlight; - self.value.print(&mut printer)?; - f.write_str(&printer.into_buffer()) + self.value.print(&mut p)?; + f.write_str(&p.into_buffer()) } } diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs index 8e0620f20487b..129d0963a75a8 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs @@ -946,8 +946,8 @@ fn foo(&self) -> Self::T { String::new() } } pub fn format_generic_args(&self, args: &[ty::GenericArg<'tcx>]) -> String { - FmtPrinter::print_string(self.tcx, hir::def::Namespace::TypeNS, |cx| { - cx.path_generic_args(|_| Ok(()), args) + FmtPrinter::print_string(self.tcx, hir::def::Namespace::TypeNS, |p| { + p.path_generic_args(|_| Ok(()), args) }) .expect("could not write to `String`.") } diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs index d929ecf68bf34..4f1f5c330e5f4 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs @@ -66,10 +66,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if s.len() > 50 { // We don't need to save the type to a file, we will be talking about this type already // in a separate note when we explain the obligation, so it will be available that way. - let mut cx: FmtPrinter<'_, '_> = + let mut p: FmtPrinter<'_, '_> = FmtPrinter::new_with_limit(tcx, Namespace::TypeNS, rustc_session::Limit(6)); - value.print(&mut cx).unwrap(); - cx.into_buffer() + value.print(&mut p).unwrap(); + p.into_buffer() } else { s } From 03bc1be8dd9be8fb6a0672e89731d0826dff0a09 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 1 Aug 2025 13:10:35 +1000 Subject: [PATCH 086/118] Simplify `SymbolMangler::print_type`. `Bound`/`Placeholder`/`Infer`/`Error` shouldn't occur, so we can handle them in the second exhaustive `match`, and ignore them in the first non-exhaustive `match`. 
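
For readers less familiar with this file, the shape of the change is: the first `match` stays deliberately non-exhaustive and only recognises the kinds that have a short "basic" mangling string, while the second `match` is exhaustive, so the impossible kinds now fail loudly there instead of being listed in both places. Below is a minimal, self-contained sketch of that pattern with made-up names — `Kind`, `mangle`, and `unreachable!()` stand in for `ty::TyKind`, `print_type`, and `bug!()`; it is not the real mangler code.

```rust
// Illustrative only: `Kind` stands in for `ty::TyKind`, and `unreachable!()`
// stands in for the compiler's `bug!()` macro.
#[allow(dead_code)]
#[derive(Debug)]
enum Kind {
    Bool,
    Unit,
    Param,
    // Kinds that should never reach the mangler at this point.
    Bound,
    Placeholder,
    Infer,
    Error,
    // A kind that needs the longer, structured encoding.
    Ref(Box<Kind>),
}

fn mangle(kind: &Kind) -> String {
    // First match: intentionally non-exhaustive. Only the kinds with a short
    // "basic" encoding are listed; everything else falls through to `""`.
    let basic = match kind {
        Kind::Bool => "b",
        Kind::Unit => "u",
        Kind::Param => "p",
        _ => "",
    };
    if !basic.is_empty() {
        return basic.to_string();
    }

    // Second match: exhaustive, so every kind must be handled. Kinds already
    // covered above, and kinds that must not occur, get explicit arms that
    // panic rather than being silently skipped.
    match kind {
        Kind::Bool | Kind::Unit | Kind::Param => unreachable!(),
        Kind::Bound | Kind::Placeholder | Kind::Infer | Kind::Error => {
            unreachable!("unexpected kind: {kind:?}")
        }
        Kind::Ref(inner) => format!("R{}", mangle(inner)),
    }
}

fn main() {
    assert_eq!(mangle(&Kind::Bool), "b");
    assert_eq!(mangle(&Kind::Ref(Box::new(Kind::Param))), "Rp");
}
```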
--- compiler/rustc_symbol_mangling/src/v0.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index ce1eb1a164860..a34d8b4436ed2 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -425,7 +425,6 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { ty::Bool => "b", ty::Char => "c", ty::Str => "e", - ty::Tuple(_) if ty.is_unit() => "u", ty::Int(IntTy::I8) => "a", ty::Int(IntTy::I16) => "s", ty::Int(IntTy::I32) => "l", @@ -444,12 +443,12 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { ty::Float(FloatTy::F128) => "C4f128", ty::Never => "z", + ty::Tuple(_) if ty.is_unit() => "u", + // Should only be encountered within the identity-substituted // impl header of an item nested within an impl item. ty::Param(_) => "p", - ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error(_) => bug!(), - _ => "", }; if !basic_type.is_empty() { @@ -468,11 +467,9 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { unreachable!() } ty::Tuple(_) if ty.is_unit() => unreachable!(), + ty::Param(_) => unreachable!(), - // Placeholders, also handled as part of basic types. - ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error(_) => { - unreachable!() - } + ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error(_) => bug!(), ty::Ref(r, ty, mutbl) => { self.push(match mutbl { From 03dab500a2a885c3cf196d6d72695deda7da7698 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 1 Aug 2025 15:08:01 +1000 Subject: [PATCH 087/118] Remove `p!`. It's a cryptic macro that makes some things slightly more concise in `PrettyPrinter`. E.g. if you declare `define_scope_printer!(p)` in a scope you can then call `p! to get these transformations: ``` p!("foo"); --> write!(p, "foo")?; p!(print(ty)); --> ty.print(p)?; p!(method(args)); --> p.method(args)?; ``` You can also chain calls, e.g.: ``` p!("foo", print(ty)); --> write!(p, "foo")?; ty.print(p)?; ``` Ultimately this doesn't seem worth it. The macro definition is hard to read, the call sites are hard to read, `define_scope_printer!` is pretty gross, and the code size reductions are small. Tellingly, many normal `write!` and `print` calls are sprinkled throughout the code, probably because people have made modifications and didn't want to use or understand how to use `p!`. This commit removes it. --- compiler/rustc_middle/src/ty/print/pretty.rs | 732 ++++++++++--------- 1 file changed, 370 insertions(+), 362 deletions(-) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 2b5425e50275a..538179245c426 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -29,33 +29,6 @@ use crate::ty::{ TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, }; -macro_rules! p { - (@$lit:literal) => { - write!(scoped_printer!(), $lit)? - }; - (@write($($data:expr),+)) => { - write!(scoped_printer!(), $($data),+)? - }; - (@print($x:expr)) => { - $x.print(scoped_printer!())? - }; - (@$method:ident($($arg:expr),*)) => { - scoped_printer!().$method($($arg),*)? - }; - ($($elem:tt $(($($args:tt)*))?),+) => {{ - $(p!(@ $elem $(($($args)*))?);)+ - }}; -} -macro_rules! define_scoped_printer { - ($p:ident) => { - macro_rules! scoped_printer { - () => { - $p - }; - } - }; -} - thread_local! 
{ static FORCE_IMPL_FILENAME_LINE: Cell = const { Cell::new(false) }; static SHOULD_PREFIX_WITH_CRATE: Cell = const { Cell::new(false) }; @@ -690,11 +663,10 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } self.generic_delimiters(|p| { - define_scoped_printer!(p); - - p!(print(self_ty)); + self_ty.print(p)?; if let Some(trait_ref) = trait_ref { - p!(" as ", print(trait_ref.print_only_trait_path())); + write!(p, " as ")?; + trait_ref.print_only_trait_path().print(p)?; } Ok(()) }) @@ -709,65 +681,69 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { print_prefix(self)?; self.generic_delimiters(|p| { - define_scoped_printer!(p); - - p!("impl "); + write!(p, "impl ")?; if let Some(trait_ref) = trait_ref { - p!(print(trait_ref.print_only_trait_path()), " for "); + trait_ref.print_only_trait_path().print(p)?; + write!(p, " for ")?; } - p!(print(self_ty)); + self_ty.print(p)?; Ok(()) }) } fn pretty_print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { - define_scoped_printer!(self); - match *ty.kind() { - ty::Bool => p!("bool"), - ty::Char => p!("char"), - ty::Int(t) => p!(write("{}", t.name_str())), - ty::Uint(t) => p!(write("{}", t.name_str())), - ty::Float(t) => p!(write("{}", t.name_str())), + ty::Bool => write!(self, "bool")?, + ty::Char => write!(self, "char")?, + ty::Int(t) => write!(self, "{}", t.name_str())?, + ty::Uint(t) => write!(self, "{}", t.name_str())?, + ty::Float(t) => write!(self, "{}", t.name_str())?, ty::Pat(ty, pat) => { - p!("(", print(ty), ") is ", write("{pat:?}")) + write!(self, "(")?; + ty.print(self)?; + write!(self, ") is {pat:?}")?; } ty::RawPtr(ty, mutbl) => { - p!(write("*{} ", mutbl.ptr_str())); - p!(print(ty)) + write!(self, "*{} ", mutbl.ptr_str())?; + ty.print(self)?; } ty::Ref(r, ty, mutbl) => { - p!("&"); + write!(self, "&")?; if self.should_print_region(r) { - p!(print(r), " "); + r.print(self)?; + write!(self, " ")?; } - p!(print(ty::TypeAndMut { ty, mutbl })) + ty::TypeAndMut { ty, mutbl }.print(self)?; } - ty::Never => p!("!"), + ty::Never => write!(self, "!")?, ty::Tuple(tys) => { - p!("(", comma_sep(tys.iter())); + write!(self, "(")?; + self.comma_sep(tys.iter())?; if tys.len() == 1 { - p!(","); + write!(self, ",")?; } - p!(")") + write!(self, ")")?; } ty::FnDef(def_id, args) => { if with_reduced_queries() { - p!(print_def_path(def_id, args)); + self.print_def_path(def_id, args)?; } else { let mut sig = self.tcx().fn_sig(def_id).instantiate(self.tcx(), args); if self.tcx().codegen_fn_attrs(def_id).safe_target_features { - p!("#[target_features] "); + write!(self, "#[target_features] ")?; sig = sig.map_bound(|mut sig| { sig.safety = hir::Safety::Safe; sig }); } - p!(print(sig), " {{", print_value_path(def_id, args), "}}"); + sig.print(self)?; + write!(self, " {{")?; + self.print_value_path(def_id, args)?; + write!(self, "}}")?; } } - ty::FnPtr(ref sig_tys, hdr) => p!(print(sig_tys.with(hdr))), + ty::FnPtr(ref sig_tys, hdr) => sig_tys.with(hdr).print(self)?, ty::UnsafeBinder(ref bound_ty) => { self.wrap_binder(bound_ty, WrapBinderMode::Unsafe, |ty, p| { p.pretty_print_type(*ty) @@ -775,54 +751,50 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } ty::Infer(infer_ty) => { if self.should_print_verbose() { - p!(write("{:?}", ty.kind())); + write!(self, "{:?}", ty.kind())?; return Ok(()); } if let ty::TyVar(ty_vid) = infer_ty { if let Some(name) = self.ty_infer_name(ty_vid) { - p!(write("{}", name)) + write!(self, "{name}")?; } else { - p!(write("{}", infer_ty)) + write!(self, "{infer_ty}")?; } } else { - 
p!(write("{}", infer_ty)) + write!(self, "{infer_ty}")?; } } - ty::Error(_) => p!("{{type error}}"), - ty::Param(ref param_ty) => p!(print(param_ty)), + ty::Error(_) => write!(self, "{{type error}}")?, + ty::Param(ref param_ty) => param_ty.print(self)?, ty::Bound(debruijn, bound_ty) => match bound_ty.kind { ty::BoundTyKind::Anon => { rustc_type_ir::debug_bound_var(self, debruijn, bound_ty.var)? } ty::BoundTyKind::Param(def_id) => match self.should_print_verbose() { - true => p!(write("{:?}", ty.kind())), - false => p!(write("{}", self.tcx().item_name(def_id))), + true => write!(self, "{:?}", ty.kind())?, + false => write!(self, "{}", self.tcx().item_name(def_id))?, }, }, - ty::Adt(def, args) => { - p!(print_def_path(def.did(), args)); - } + ty::Adt(def, args) => self.print_def_path(def.did(), args)?, ty::Dynamic(data, r, repr) => { let print_r = self.should_print_region(r); if print_r { - p!("("); + write!(self, "(")?; } match repr { - ty::Dyn => p!("dyn "), + ty::Dyn => write!(self, "dyn ")?, } - p!(print(data)); + data.print(self)?; if print_r { - p!(" + ", print(r), ")"); + write!(self, " + ")?; + r.print(self)?; + write!(self, ")")?; } } - ty::Foreign(def_id) => { - p!(print_def_path(def_id, &[])); - } - ty::Alias(ty::Projection | ty::Inherent | ty::Free, ref data) => { - p!(print(data)) - } - ty::Placeholder(placeholder) => p!(print(placeholder)), + ty::Foreign(def_id) => self.print_def_path(def_id, &[])?, + ty::Alias(ty::Projection | ty::Inherent | ty::Free, ref data) => data.print(self)?, + ty::Placeholder(placeholder) => placeholder.print(self)?, ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => { // We use verbose printing in 'NO_QUERIES' mode, to // avoid needing to call `predicates_of`. This should @@ -834,7 +806,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // example.] if self.should_print_verbose() { // FIXME(eddyb) print this with `print_def_path`. - p!(write("Opaque({:?}, {})", def_id, args.print_as_list())); + write!(self, "Opaque({:?}, {})", def_id, args.print_as_list())?; return Ok(()); } @@ -849,17 +821,17 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if d == def_id { // If the type alias directly starts with the `impl` of the // opaque type we're printing, then skip the `::{opaque#1}`. - p!(print_def_path(parent, args)); + self.print_def_path(parent, args)?; return Ok(()); } } // Complex opaque type, e.g. 
`type Foo = (i32, impl Debug);` - p!(print_def_path(def_id, args)); + self.print_def_path(def_id, args)?; return Ok(()); } _ => { if with_reduced_queries() { - p!(print_def_path(def_id, &[])); + self.print_def_path(def_id, &[])?; return Ok(()); } else { return self.pretty_print_opaque_impl_type(def_id, args); @@ -867,9 +839,9 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } } } - ty::Str => p!("str"), + ty::Str => write!(self, "str")?, ty::Coroutine(did, args) => { - p!("{{"); + write!(self, "{{")?; let coroutine_kind = self.tcx().coroutine_kind(did).unwrap(); let should_print_movability = self.should_print_verbose() || matches!(coroutine_kind, hir::CoroutineKind::Coroutine(_)); @@ -877,12 +849,12 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if should_print_movability { match coroutine_kind.movability() { hir::Movability::Movable => {} - hir::Movability::Static => p!("static "), + hir::Movability::Static => write!(self, "static ")?, } } if !self.should_print_verbose() { - p!(write("{}", coroutine_kind)); + write!(self, "{coroutine_kind}")?; if coroutine_kind.is_fn_like() { // If we are printing an `async fn` coroutine type, then give the path // of the fn, instead of its span, because that will in most cases be @@ -891,66 +863,71 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // This will look like: // {async fn body of some_fn()} let did_of_the_fn_item = self.tcx().parent(did); - p!(" of ", print_def_path(did_of_the_fn_item, args), "()"); + write!(self, " of ")?; + self.print_def_path(did_of_the_fn_item, args)?; + write!(self, "()")?; } else if let Some(local_did) = did.as_local() { let span = self.tcx().def_span(local_did); - p!(write( + write!( + self, "@{}", // This may end up in stderr diagnostics but it may also be emitted // into MIR. Hence we use the remapped path if available self.tcx().sess.source_map().span_to_embeddable_string(span) - )); + )?; } else { - p!("@", print_def_path(did, args)); + write!(self, "@")?; + self.print_def_path(did, args)?; } } else { - p!(print_def_path(did, args)); - p!( - " upvar_tys=", - print(args.as_coroutine().tupled_upvars_ty()), - " resume_ty=", - print(args.as_coroutine().resume_ty()), - " yield_ty=", - print(args.as_coroutine().yield_ty()), - " return_ty=", - print(args.as_coroutine().return_ty()) - ); + self.print_def_path(did, args)?; + write!(self, " upvar_tys=")?; + args.as_coroutine().tupled_upvars_ty().print(self)?; + write!(self, " resume_ty=")?; + args.as_coroutine().resume_ty().print(self)?; + write!(self, " yield_ty=")?; + args.as_coroutine().yield_ty().print(self)?; + write!(self, " return_ty=")?; + args.as_coroutine().return_ty().print(self)?; } - p!("}}") + write!(self, "}}")? } ty::CoroutineWitness(did, args) => { - p!(write("{{")); + write!(self, "{{")?; if !self.tcx().sess.verbose_internals() { - p!("coroutine witness"); + write!(self, "coroutine witness")?; if let Some(did) = did.as_local() { let span = self.tcx().def_span(did); - p!(write( + write!( + self, "@{}", // This may end up in stderr diagnostics but it may also be emitted // into MIR. Hence we use the remapped path if available self.tcx().sess.source_map().span_to_embeddable_string(span) - )); + )?; } else { - p!(write("@"), print_def_path(did, args)); + write!(self, "@")?; + self.print_def_path(did, args)?; } } else { - p!(print_def_path(did, args)); + self.print_def_path(did, args)?; } - p!("}}") + write!(self, "}}")? 
} ty::Closure(did, args) => { - p!(write("{{")); + write!(self, "{{")?; if !self.should_print_verbose() { - p!(write("closure")); + write!(self, "closure")?; if self.should_truncate() { write!(self, "@...}}")?; return Ok(()); } else { if let Some(did) = did.as_local() { if self.tcx().sess.opts.unstable_opts.span_free_formats { - p!("@", print_def_path(did.to_def_id(), args)); + write!(self, "@")?; + self.print_def_path(did.to_def_id(), args)?; } else { let span = self.tcx().def_span(did); let preference = if with_forced_trimmed_paths() { @@ -958,54 +935,56 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } else { FileNameDisplayPreference::Remapped }; - p!(write( + write!( + self, "@{}", - // This may end up in stderr diagnostics but it may also be emitted - // into MIR. Hence we use the remapped path if available + // This may end up in stderr diagnostics but it may also be + // emitted into MIR. Hence we use the remapped path if + // available self.tcx().sess.source_map().span_to_string(span, preference) - )); + )?; } } else { - p!(write("@"), print_def_path(did, args)); + write!(self, "@")?; + self.print_def_path(did, args)?; } } } else { - p!(print_def_path(did, args)); - p!( - " closure_kind_ty=", - print(args.as_closure().kind_ty()), - " closure_sig_as_fn_ptr_ty=", - print(args.as_closure().sig_as_fn_ptr_ty()), - " upvar_tys=", - print(args.as_closure().tupled_upvars_ty()) - ); + self.print_def_path(did, args)?; + write!(self, " closure_kind_ty=")?; + args.as_closure().kind_ty().print(self)?; + write!(self, " closure_sig_as_fn_ptr_ty=")?; + args.as_closure().sig_as_fn_ptr_ty().print(self)?; + write!(self, " upvar_tys=")?; + args.as_closure().tupled_upvars_ty().print(self)?; } - p!("}}"); + write!(self, "}}")?; } ty::CoroutineClosure(did, args) => { - p!(write("{{")); + write!(self, "{{")?; if !self.should_print_verbose() { match self.tcx().coroutine_kind(self.tcx().coroutine_for_closure(did)).unwrap() { hir::CoroutineKind::Desugared( hir::CoroutineDesugaring::Async, hir::CoroutineSource::Closure, - ) => p!("async closure"), + ) => write!(self, "async closure")?, hir::CoroutineKind::Desugared( hir::CoroutineDesugaring::AsyncGen, hir::CoroutineSource::Closure, - ) => p!("async gen closure"), + ) => write!(self, "async gen closure")?, hir::CoroutineKind::Desugared( hir::CoroutineDesugaring::Gen, hir::CoroutineSource::Closure, - ) => p!("gen closure"), + ) => write!(self, "gen closure")?, _ => unreachable!( "coroutine from coroutine-closure should have CoroutineSource::Closure" ), } if let Some(did) = did.as_local() { if self.tcx().sess.opts.unstable_opts.span_free_formats { - p!("@", print_def_path(did.to_def_id(), args)); + write!(self, "@")?; + self.print_def_path(did.to_def_id(), args)?; } else { let span = self.tcx().def_span(did); let preference = if with_forced_trimmed_paths() { @@ -1013,33 +992,43 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } else { FileNameDisplayPreference::Remapped }; - p!(write( + write!( + self, "@{}", // This may end up in stderr diagnostics but it may also be emitted // into MIR. 
Hence we use the remapped path if available self.tcx().sess.source_map().span_to_string(span, preference) - )); + )?; } } else { - p!(write("@"), print_def_path(did, args)); + write!(self, "@")?; + self.print_def_path(did, args)?; } } else { - p!(print_def_path(did, args)); - p!( - " closure_kind_ty=", - print(args.as_coroutine_closure().kind_ty()), - " signature_parts_ty=", - print(args.as_coroutine_closure().signature_parts_ty()), - " upvar_tys=", - print(args.as_coroutine_closure().tupled_upvars_ty()), - " coroutine_captures_by_ref_ty=", - print(args.as_coroutine_closure().coroutine_captures_by_ref_ty()) - ); + self.print_def_path(did, args)?; + write!(self, " closure_kind_ty=")?; + args.as_coroutine_closure().kind_ty().print(self)?; + write!(self, " signature_parts_ty=")?; + args.as_coroutine_closure().signature_parts_ty().print(self)?; + write!(self, " upvar_tys=")?; + args.as_coroutine_closure().tupled_upvars_ty().print(self)?; + write!(self, " coroutine_captures_by_ref_ty=")?; + args.as_coroutine_closure().coroutine_captures_by_ref_ty().print(self)?; } - p!("}}"); + write!(self, "}}")?; + } + ty::Array(ty, sz) => { + write!(self, "[")?; + ty.print(self)?; + write!(self, "; ")?; + sz.print(self)?; + write!(self, "]")?; + } + ty::Slice(ty) => { + write!(self, "[")?; + ty.print(self)?; + write!(self, "]")?; } - ty::Array(ty, sz) => p!("[", print(ty), "; ", print(sz), "]"), - ty::Slice(ty) => p!("[", print(ty), "]"), } Ok(()) @@ -1138,24 +1127,24 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { &bound_args_and_self_ty, WrapBinderMode::ForAll, |(args, _), p| { - define_scoped_printer!(p); - p!(write("{}", tcx.item_name(trait_def_id))); - p!("("); + write!(p, "{}", tcx.item_name(trait_def_id))?; + write!(p, "(")?; for (idx, ty) in args.iter().enumerate() { if idx > 0 { - p!(", "); + write!(p, ", ")?; } - p!(print(ty)); + ty.print(p)?; } - p!(")"); + write!(p, ")")?; if let Some(ty) = return_ty.skip_binder().as_type() { if !ty.is_unit() { - p!(" -> ", print(return_ty)); + write!(p, " -> ")?; + return_ty.print(p)?; } } - p!(write("{}", if paren_needed { ")" } else { "" })); + write!(p, "{}", if paren_needed { ")" } else { "" })?; first = false; Ok(()) @@ -1182,12 +1171,10 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { write!(self, "{}", if first { "" } else { " + " })?; self.wrap_binder(&trait_pred, WrapBinderMode::ForAll, |trait_pred, p| { - define_scoped_printer!(p); - if trait_pred.polarity == ty::PredicatePolarity::Negative { - p!("!"); + write!(p, "!")?; } - p!(print(trait_pred.trait_ref.print_only_trait_name())); + trait_pred.trait_ref.print_only_trait_name().print(p)?; let generics = tcx.generics_of(trait_pred.def_id()); let own_args = generics.own_args_no_defaults(tcx, trait_pred.trait_ref.args); @@ -1197,32 +1184,32 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { for ty in own_args { if first { - p!("<"); + write!(p, "<")?; first = false; } else { - p!(", "); + write!(p, ", ")?; } - p!(print(ty)); + ty.print(p)?; } for (assoc_item_def_id, term) in assoc_items { if first { - p!("<"); + write!(p, "<")?; first = false; } else { - p!(", "); + write!(p, ", ")?; } - p!(write("{} = ", tcx.associated_item(assoc_item_def_id).name())); + write!(p, "{} = ", tcx.associated_item(assoc_item_def_id).name())?; match term.skip_binder().kind() { - TermKind::Ty(ty) => p!(print(ty)), - TermKind::Const(c) => p!(print(c)), + TermKind::Ty(ty) => ty.print(p)?, + TermKind::Const(c) => c.print(p)?, }; } if !first { - p!(">"); + write!(p, ">")?; } } @@ -1389,8 
+1376,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if let Some(bound_principal) = predicates.principal() { self.wrap_binder(&bound_principal, WrapBinderMode::ForAll, |principal, p| { - define_scoped_printer!(p); - p!(print_def_path(principal.def_id, &[])); + p.print_def_path(principal.def_id, &[])?; let mut resugared = false; @@ -1400,11 +1386,11 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if let ty::Tuple(tys) = principal.args.type_at(0).kind() { let mut projections = predicates.projection_bounds(); if let (Some(proj), None) = (projections.next(), projections.next()) { - p!(pretty_fn_sig( + p.pretty_fn_sig( tys, false, - proj.skip_binder().term.as_type().expect("Return type was a const") - )); + proj.skip_binder().term.as_type().expect("Return type was a const"), + )?; resugared = true; } } @@ -1461,13 +1447,13 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { .sort_by_cached_key(|proj| p.tcx().item_name(proj.def_id).to_string()); if !args.is_empty() || !projections.is_empty() { - p!(generic_delimiters(|p| { + p.generic_delimiters(|p| { p.comma_sep(args.iter().copied())?; if !args.is_empty() && !projections.is_empty() { write!(p, ", ")?; } p.comma_sep(projections.iter().copied()) - })); + })?; } } Ok(()) @@ -1476,8 +1462,6 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { first = false; } - define_scoped_printer!(self); - // Builtin bounds. // FIXME(eddyb) avoid printing twice (needed to ensure // that the auto traits are sorted *and* printed via p). @@ -1494,11 +1478,11 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { for def_id in auto_traits { if !first { - p!(" + "); + write!(self, " + ")?; } first = false; - p!(print_def_path(def_id, &[])); + self.print_def_path(def_id, &[])?; } Ok(()) @@ -1510,18 +1494,18 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { c_variadic: bool, output: Ty<'tcx>, ) -> Result<(), PrintError> { - define_scoped_printer!(self); - - p!("(", comma_sep(inputs.iter().copied())); + write!(self, "(")?; + self.comma_sep(inputs.iter().copied())?; if c_variadic { if !inputs.is_empty() { - p!(", "); + write!(self, ", ")?; } - p!("..."); + write!(self, "...")?; } - p!(")"); + write!(self, ")")?; if !output.is_unit() { - p!(" -> ", print(output)); + write!(self, " -> ")?; + output.print(self)?; } Ok(()) @@ -1532,10 +1516,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ct: ty::Const<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_printer!(self); - if self.should_print_verbose() { - p!(write("{:?}", ct)); + write!(self, "{ct:?}")?; return Ok(()); } @@ -1543,25 +1525,28 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, args }) => { match self.tcx().def_kind(def) { DefKind::Const | DefKind::AssocConst => { - p!(print_value_path(def, args)) + self.print_value_path(def, args)?; } DefKind::AnonConst => { if def.is_local() && let span = self.tcx().def_span(def) && let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span) { - p!(write("{}", snip)) + write!(self, "{snip}")?; } else { - // Do not call `print_value_path` as if a parent of this anon const is an impl it will - // attempt to print out the impl trait ref i.e. `::{constant#0}`. This would - // cause printing to enter an infinite recursion if the anon const is in the self type i.e. 
- // `impl Default for [T; 32 - 1 - 1 - 1] {` - // where we would try to print `<[T; /* print `constant#0` again */] as Default>::{constant#0}` - p!(write( + // Do not call `print_value_path` as if a parent of this anon const is + // an impl it will attempt to print out the impl trait ref i.e. `::{constant#0}`. This would cause printing to enter an + // infinite recursion if the anon const is in the self type i.e. + // `impl Default for [T; 32 - 1 - 1 - 1] {` where we would + // try to print + // `<[T; /* print constant#0 again */] as // Default>::{constant#0}`. + write!( + self, "{}::{}", self.tcx().crate_name(def.krate), self.tcx().def_path(def).to_string_no_crate_verbose() - )) + )?; } } defkind => bug!("`{:?}` has unexpected defkind {:?}", ct, defkind), @@ -1569,11 +1554,11 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } ty::ConstKind::Infer(infer_ct) => match infer_ct { ty::InferConst::Var(ct_vid) if let Some(name) = self.const_infer_name(ct_vid) => { - p!(write("{}", name)) + write!(self, "{name}")?; } _ => write!(self, "_")?, }, - ty::ConstKind::Param(ParamConst { name, .. }) => p!(write("{}", name)), + ty::ConstKind::Param(ParamConst { name, .. }) => write!(self, "{name}")?, ty::ConstKind::Value(cv) => { return self.pretty_print_const_valtree(cv, print_ty); } @@ -1581,11 +1566,11 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ty::ConstKind::Bound(debruijn, bound_var) => { rustc_type_ir::debug_bound_var(self, debruijn, bound_var)? } - ty::ConstKind::Placeholder(placeholder) => p!(write("{placeholder:?}")), + ty::ConstKind::Placeholder(placeholder) => write!(self, "{placeholder:?}")?, // FIXME(generic_const_exprs): // write out some legible representation of an abstract const? ty::ConstKind::Expr(expr) => self.pretty_print_const_expr(expr, print_ty)?, - ty::ConstKind::Error(_) => p!("{{const error}}"), + ty::ConstKind::Error(_) => write!(self, "{{const error}}")?, }; Ok(()) } @@ -1595,7 +1580,6 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { expr: Expr<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_printer!(self); match expr.kind { ty::ExprKind::Binop(op) => { let (_, _, c1, c2) = expr.binop_args(); @@ -1634,7 +1618,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { |this| this.pretty_print_const(c1, print_ty), lhs_parenthesized, )?; - p!(write(" {formatted_op} ")); + write!(self, " {formatted_op} ")?; self.maybe_parenthesized( |this| this.pretty_print_const(c2, print_ty), rhs_parenthesized, @@ -1657,7 +1641,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ty::ConstKind::Expr(_) => true, _ => false, }; - p!(write("{formatted_op}")); + write!(self, "{formatted_op}")?; self.maybe_parenthesized( |this| this.pretty_print_const(ct, print_ty), parenthesized, @@ -1668,7 +1652,9 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { write!(self, "(")?; self.pretty_print_const(fn_def, print_ty)?; - p!(")(", comma_sep(fn_args), ")"); + write!(self, ")(")?; + self.comma_sep(fn_args)?; + write!(self, ")")?; } ty::ExprKind::Cast(kind) => { let (_, value, to_ty) = expr.cast_args(); @@ -1718,8 +1704,6 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ptr: Pointer, ty: Ty<'tcx>, ) -> Result<(), PrintError> { - define_scoped_printer!(self); - let (prov, offset) = ptr.prov_and_relative_offset(); match ty.kind() { // Byte strings (&[u8; N]) @@ -1734,19 +1718,19 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if let Ok(byte_str) = 
alloc.inner().get_bytes_strip_provenance(&self.tcx(), range) { - p!(pretty_print_byte_str(byte_str)) + self.pretty_print_byte_str(byte_str)?; } else { - p!("") + write!(self, "")?; } } // FIXME: for statics, vtables, and functions, we could in principle print more detail. Some(GlobalAlloc::Static(def_id)) => { - p!(write("", def_id)) + write!(self, "")?; } - Some(GlobalAlloc::Function { .. }) => p!(""), - Some(GlobalAlloc::VTable(..)) => p!(""), - Some(GlobalAlloc::TypeId { .. }) => p!(""), - None => p!(""), + Some(GlobalAlloc::Function { .. }) => write!(self, "")?, + Some(GlobalAlloc::VTable(..)) => write!(self, "")?, + Some(GlobalAlloc::TypeId { .. }) => write!(self, "")?, + None => write!(self, "")?, } return Ok(()); } @@ -1778,40 +1762,38 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ty: Ty<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_printer!(self); - match ty.kind() { // Bool - ty::Bool if int == ScalarInt::FALSE => p!("false"), - ty::Bool if int == ScalarInt::TRUE => p!("true"), + ty::Bool if int == ScalarInt::FALSE => write!(self, "false")?, + ty::Bool if int == ScalarInt::TRUE => write!(self, "true")?, // Float ty::Float(fty) => match fty { ty::FloatTy::F16 => { let val = Half::try_from(int).unwrap(); - p!(write("{}{}f16", val, if val.is_finite() { "" } else { "_" })) + write!(self, "{}{}f16", val, if val.is_finite() { "" } else { "_" })?; } ty::FloatTy::F32 => { let val = Single::try_from(int).unwrap(); - p!(write("{}{}f32", val, if val.is_finite() { "" } else { "_" })) + write!(self, "{}{}f32", val, if val.is_finite() { "" } else { "_" })?; } ty::FloatTy::F64 => { let val = Double::try_from(int).unwrap(); - p!(write("{}{}f64", val, if val.is_finite() { "" } else { "_" })) + write!(self, "{}{}f64", val, if val.is_finite() { "" } else { "_" })?; } ty::FloatTy::F128 => { let val = Quad::try_from(int).unwrap(); - p!(write("{}{}f128", val, if val.is_finite() { "" } else { "_" })) + write!(self, "{}{}f128", val, if val.is_finite() { "" } else { "_" })?; } }, // Int ty::Uint(_) | ty::Int(_) => { let int = ConstInt::new(int, matches!(ty.kind(), ty::Int(_)), ty.is_ptr_sized_integral()); - if print_ty { p!(write("{:#?}", int)) } else { p!(write("{:?}", int)) } + if print_ty { write!(self, "{int:#?}")? } else { write!(self, "{int:?}")? } } // Char ty::Char if char::try_from(int).is_ok() => { - p!(write("{:?}", char::try_from(int).unwrap())) + write!(self, "{:?}", char::try_from(int).unwrap())?; } // Pointer types ty::Ref(..) | ty::RawPtr(_, _) | ty::FnPtr(..) 
=> { @@ -1827,7 +1809,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } ty::Pat(base_ty, pat) if self.tcx().validate_scalar_in_layout(int, ty) => { self.pretty_print_const_scalar_int(int, *base_ty, print_ty)?; - p!(write(" is {pat:?}")); + write!(self, " is {pat:?}")?; } // Nontrivial types with scalar bit representation _ => { @@ -1876,10 +1858,10 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { cv: ty::Value<'tcx>, print_ty: bool, ) -> Result<(), PrintError> { - define_scoped_printer!(self); - if with_reduced_queries() || self.should_print_verbose() { - p!(write("ValTree({:?}: ", cv.valtree), print(cv.ty), ")"); + write!(self, "ValTree({:?}: ", cv.valtree)?; + cv.ty.print(self)?; + write!(self, ")")?; return Ok(()); } @@ -1900,13 +1882,13 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let bytes = cv.try_to_raw_bytes(self.tcx()).unwrap_or_else(|| { bug!("expected to convert valtree to raw bytes for type {:?}", cv.ty) }); - p!(write("{:?}", String::from_utf8_lossy(bytes))); + write!(self, "{:?}", String::from_utf8_lossy(bytes))?; return Ok(()); } _ => { let cv = ty::Value { valtree: cv.valtree, ty: inner_ty }; - p!("&"); - p!(pretty_print_const_valtree(cv, print_ty)); + write!(self, "&")?; + self.pretty_print_const_valtree(cv, print_ty)?; return Ok(()); } }, @@ -1914,8 +1896,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let bytes = cv.try_to_raw_bytes(self.tcx()).unwrap_or_else(|| { bug!("expected to convert valtree to raw bytes for type {:?}", t) }); - p!("*"); - p!(pretty_print_byte_str(bytes)); + write!(self, "*")?; + self.pretty_print_byte_str(bytes)?; return Ok(()); } // Aggregates, printed as array/tuple/struct/variant construction syntax. @@ -1928,14 +1910,17 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let fields = contents.fields.iter().copied(); match *cv.ty.kind() { ty::Array(..) => { - p!("[", comma_sep(fields), "]"); + write!(self, "[")?; + self.comma_sep(fields)?; + write!(self, "]")?; } ty::Tuple(..) 
=> { - p!("(", comma_sep(fields)); + write!(self, "(")?; + self.comma_sep(fields)?; if contents.fields.len() == 1 { - p!(","); + write!(self, ",")?; } - p!(")"); + write!(self, ")")?; } ty::Adt(def, _) if def.variants().is_empty() => { self.typed_value( @@ -1951,23 +1936,26 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let variant_idx = contents.variant.expect("destructed const of adt without variant idx"); let variant_def = &def.variant(variant_idx); - p!(print_value_path(variant_def.def_id, args)); + self.print_value_path(variant_def.def_id, args)?; match variant_def.ctor_kind() { Some(CtorKind::Const) => {} Some(CtorKind::Fn) => { - p!("(", comma_sep(fields), ")"); + write!(self, "(")?; + self.comma_sep(fields)?; + write!(self, ")")?; } None => { - p!(" {{ "); + write!(self, " {{ ")?; let mut first = true; for (field_def, field) in iter::zip(&variant_def.fields, fields) { if !first { - p!(", "); + write!(self, ", ")?; } - p!(write("{}: ", field_def.name), print(field)); + write!(self, "{}: ", field_def.name)?; + field.print(self)?; first = false; } - p!(" }}"); + write!(self, " }}")?; } } } @@ -1976,7 +1964,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { return Ok(()); } (ty::ValTreeKind::Leaf(leaf), ty::Ref(_, inner_ty, _)) => { - p!(write("&")); + write!(self, "&")?; return self.pretty_print_const_scalar_int(*leaf, inner_ty, print_ty); } (ty::ValTreeKind::Leaf(leaf), _) => { @@ -1984,7 +1972,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } (_, ty::FnDef(def_id, args)) => { // Never allowed today, but we still encounter them in invalid const args. - p!(print_value_path(def_id, args)); + self.print_value_path(def_id, args)?; return Ok(()); } // FIXME(oli-obk): also pretty print arrays and other aggregate constants by reading @@ -1994,12 +1982,13 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // fallback if cv.valtree.is_zst() { - p!(write("")); + write!(self, "")?; } else { - p!(write("{:?}", cv.valtree)); + write!(self, "{:?}", cv.valtree)?; } if print_ty { - p!(": ", print(cv.ty)); + write!(self, ": ")?; + cv.ty.print(self)?; } Ok(()) } @@ -2013,19 +2002,18 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { write!(self, "impl ")?; self.wrap_binder(&sig, WrapBinderMode::ForAll, |sig, p| { - define_scoped_printer!(p); - - p!(write("{kind}(")); + write!(p, "{kind}(")?; for (i, arg) in sig.inputs()[0].tuple_fields().iter().enumerate() { if i > 0 { - p!(", "); + write!(p, ", ")?; } - p!(print(arg)); + arg.print(p)?; } - p!(")"); + write!(p, ")")?; if !sig.output().is_unit() { - p!(" -> ", print(sig.output())); + write!(p, " -> ")?; + sig.output().print(p)?; } Ok(()) @@ -2036,15 +2024,9 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { &mut self, constness: ty::BoundConstness, ) -> Result<(), PrintError> { - define_scoped_printer!(self); - match constness { - ty::BoundConstness::Const => { - p!("const "); - } - ty::BoundConstness::Maybe => { - p!("[const] "); - } + ty::BoundConstness::Const => write!(self, "const ")?, + ty::BoundConstness::Maybe => write!(self, "[const] ")?, } Ok(()) } @@ -2562,11 +2544,10 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { ty: Ty<'tcx>, ) -> Result<(), PrintError> { let print = |this: &mut Self| { - define_scoped_printer!(this); if this.print_alloc_ids { - p!(write("{:?}", p)); + write!(this, "{p:?}")?; } else { - p!("&_"); + write!(this, "&_")?; } Ok(()) }; @@ -2577,17 +2558,15 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { // HACK(eddyb) 
limited to `FmtPrinter` because of `region_highlight_mode`. impl<'tcx> FmtPrinter<'_, 'tcx> { pub fn pretty_print_region(&mut self, region: ty::Region<'tcx>) -> Result<(), fmt::Error> { - define_scoped_printer!(self); - // Watch out for region highlights. let highlight = self.region_highlight_mode; if let Some(n) = highlight.region_highlighted(region) { - p!(write("'{}", n)); + write!(self, "'{n}")?; return Ok(()); } if self.should_print_verbose() { - p!(write("{:?}", region)); + write!(self, "{region:?}")?; return Ok(()); } @@ -2599,12 +2578,12 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { // `explain_region()` or `note_and_explain_region()`. match region.kind() { ty::ReEarlyParam(data) => { - p!(write("{}", data.name)); + write!(self, "{}", data.name)?; return Ok(()); } ty::ReLateParam(ty::LateParamRegion { kind, .. }) => { if let Some(name) = kind.get_name(self.tcx) { - p!(write("{}", name)); + write!(self, "{name}")?; return Ok(()); } } @@ -2613,31 +2592,31 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { bound: ty::BoundRegion { kind: br, .. }, .. }) => { if let Some(name) = br.get_name(self.tcx) { - p!(write("{}", name)); + write!(self, "{name}")?; return Ok(()); } if let Some((region, counter)) = highlight.highlight_bound_region { if br == region { - p!(write("'{}", counter)); + write!(self, "'{counter}")?; return Ok(()); } } } ty::ReVar(region_vid) if identify_regions => { - p!(write("{:?}", region_vid)); + write!(self, "{region_vid:?}")?; return Ok(()); } ty::ReVar(_) => {} ty::ReErased => {} ty::ReError(_) => {} ty::ReStatic => { - p!("'static"); + write!(self, "'static")?; return Ok(()); } } - p!("'_"); + write!(self, "'_")?; Ok(()) } @@ -2928,8 +2907,9 @@ where T: Print<'tcx, P>, { fn print(&self, p: &mut P) -> Result<(), PrintError> { - define_scoped_printer!(p); - p!(print(self.0), ": ", print(self.1)); + self.0.print(p)?; + write!(p, ": ")?; + self.1.print(p)?; Ok(()) } } @@ -3084,7 +3064,6 @@ macro_rules! define_print { (($self:ident, $p:ident): $($ty:ty $print:block)+) => { $(impl<'tcx, P: PrettyPrinter<'tcx>> Print<'tcx, P> for $ty { fn print(&$self, $p: &mut P) -> Result<(), PrintError> { - define_scoped_printer!($p); let _: () = $print; Ok(()) } @@ -3110,34 +3089,35 @@ define_print! 
{ (self, p): ty::FnSig<'tcx> { - p!(write("{}", self.safety.prefix_str())); + write!(p, "{}", self.safety.prefix_str())?; if self.abi != ExternAbi::Rust { - p!(write("extern {} ", self.abi)); + write!(p, "extern {} ", self.abi)?; } - p!("fn", pretty_fn_sig(self.inputs(), self.c_variadic, self.output())); + write!(p, "fn")?; + p.pretty_fn_sig(self.inputs(), self.c_variadic, self.output())?; } ty::TraitRef<'tcx> { - p!(write("<{} as {}>", self.self_ty(), self.print_only_trait_path())) + write!(p, "<{} as {}>", self.self_ty(), self.print_only_trait_path())?; } ty::AliasTy<'tcx> { let alias_term: ty::AliasTerm<'tcx> = (*self).into(); - p!(print(alias_term)) + alias_term.print(p)?; } ty::AliasTerm<'tcx> { match self.kind(p.tcx()) { - ty::AliasTermKind::InherentTy | ty::AliasTermKind::InherentConst => p!(pretty_print_inherent_projection(*self)), + ty::AliasTermKind::InherentTy | ty::AliasTermKind::InherentConst => p.pretty_print_inherent_projection(*self)?, ty::AliasTermKind::ProjectionTy => { if !(p.should_print_verbose() || with_reduced_queries()) && p.tcx().is_impl_trait_in_trait(self.def_id) { - p!(pretty_print_rpitit(self.def_id, self.args)) + p.pretty_print_rpitit(self.def_id, self.args)?; } else { - p!(print_def_path(self.def_id, self.args)); + p.print_def_path(self.def_id, self.args)?; } } ty::AliasTermKind::FreeTy @@ -3145,17 +3125,18 @@ define_print! { | ty::AliasTermKind::OpaqueTy | ty::AliasTermKind::UnevaluatedConst | ty::AliasTermKind::ProjectionConst => { - p!(print_def_path(self.def_id, self.args)); + p.print_def_path(self.def_id, self.args)?; } } } ty::TraitPredicate<'tcx> { - p!(print(self.trait_ref.self_ty()), ": "); + self.trait_ref.self_ty().print(p)?; + write!(p, ": ")?; if let ty::PredicatePolarity::Negative = self.polarity { - p!("!"); + write!(p, "!")?; } - p!(print(self.trait_ref.print_trait_sugared())) + self.trait_ref.print_trait_sugared().print(p)?; } ty::HostEffectPredicate<'tcx> { @@ -3163,60 +3144,78 @@ define_print! 
{ ty::BoundConstness::Const => { "const" } ty::BoundConstness::Maybe => { "[const]" } }; - p!(print(self.trait_ref.self_ty()), ": {constness} "); - p!(print(self.trait_ref.print_trait_sugared())) + self.trait_ref.self_ty().print(p)?; + write!(p, ": {constness} ")?; + self.trait_ref.print_trait_sugared().print(p)?; } ty::TypeAndMut<'tcx> { - p!(write("{}", self.mutbl.prefix_str()), print(self.ty)) + write!(p, "{}", self.mutbl.prefix_str())?; + self.ty.print(p)?; } ty::ClauseKind<'tcx> { match *self { - ty::ClauseKind::Trait(ref data) => { - p!(print(data)) - } - ty::ClauseKind::RegionOutlives(predicate) => p!(print(predicate)), - ty::ClauseKind::TypeOutlives(predicate) => p!(print(predicate)), - ty::ClauseKind::Projection(predicate) => p!(print(predicate)), - ty::ClauseKind::HostEffect(predicate) => p!(print(predicate)), + ty::ClauseKind::Trait(ref data) => data.print(p)?, + ty::ClauseKind::RegionOutlives(predicate) => predicate.print(p)?, + ty::ClauseKind::TypeOutlives(predicate) => predicate.print(p)?, + ty::ClauseKind::Projection(predicate) => predicate.print(p)?, + ty::ClauseKind::HostEffect(predicate) => predicate.print(p)?, ty::ClauseKind::ConstArgHasType(ct, ty) => { - p!("the constant `", print(ct), "` has type `", print(ty), "`") + write!(p, "the constant `")?; + ct.print(p)?; + write!(p, "` has type `")?; + ty.print(p)?; + write!(p, "`")?; }, - ty::ClauseKind::WellFormed(term) => p!(print(term), " well-formed"), + ty::ClauseKind::WellFormed(term) => { + term.print(p)?; + write!(p, " well-formed")?; + } ty::ClauseKind::ConstEvaluatable(ct) => { - p!("the constant `", print(ct), "` can be evaluated") + write!(p, "the constant `")?; + ct.print(p)?; + write!(p, "` can be evaluated")?; + } + ty::ClauseKind::UnstableFeature(symbol) => { + write!(p, "unstable feature: ")?; + write!(p, "`{symbol}`")?; } - ty::ClauseKind::UnstableFeature(symbol) => p!("unstable feature: ", write("`{}`", symbol)), } } ty::PredicateKind<'tcx> { match *self { - ty::PredicateKind::Clause(data) => { - p!(print(data)) - } - ty::PredicateKind::Subtype(predicate) => p!(print(predicate)), - ty::PredicateKind::Coerce(predicate) => p!(print(predicate)), + ty::PredicateKind::Clause(data) => data.print(p)?, + ty::PredicateKind::Subtype(predicate) => predicate.print(p)?, + ty::PredicateKind::Coerce(predicate) => predicate.print(p)?, ty::PredicateKind::DynCompatible(trait_def_id) => { - p!("the trait `", print_def_path(trait_def_id, &[]), "` is dyn-compatible") + write!(p, "the trait `")?; + p.print_def_path(trait_def_id, &[])?; + write!(p, "` is dyn-compatible")?; } ty::PredicateKind::ConstEquate(c1, c2) => { - p!("the constant `", print(c1), "` equals `", print(c2), "`") + write!(p, "the constant `")?; + c1.print(p)?; + write!(p, "` equals `")?; + c2.print(p)?; + write!(p, "`")?; + } + ty::PredicateKind::Ambiguous => write!(p, "ambiguous")?, + ty::PredicateKind::NormalizesTo(data) => data.print(p)?, + ty::PredicateKind::AliasRelate(t1, t2, dir) => { + t1.print(p)?; + write!(p, " {dir} ")?; + t2.print(p)?; } - ty::PredicateKind::Ambiguous => p!("ambiguous"), - ty::PredicateKind::NormalizesTo(data) => p!(print(data)), - ty::PredicateKind::AliasRelate(t1, t2, dir) => p!(print(t1), write(" {} ", dir), print(t2)), } } ty::ExistentialPredicate<'tcx> { match *self { - ty::ExistentialPredicate::Trait(x) => p!(print(x)), - ty::ExistentialPredicate::Projection(x) => p!(print(x)), - ty::ExistentialPredicate::AutoTrait(def_id) => { - p!(print_def_path(def_id, &[])); - } + ty::ExistentialPredicate::Trait(x) => x.print(p)?, + 
ty::ExistentialPredicate::Projection(x) => x.print(p)?, + ty::ExistentialPredicate::AutoTrait(def_id) => p.print_def_path(def_id, &[])?, } } @@ -3224,7 +3223,7 @@ define_print! { // Use a type that can't appear in defaults of type parameters. let dummy_self = Ty::new_fresh(p.tcx(), 0); let trait_ref = self.with_self_ty(p.tcx(), dummy_self); - p!(print(trait_ref.print_only_trait_path())) + trait_ref.print_only_trait_path().print(p)?; } ty::ExistentialProjection<'tcx> { @@ -3232,31 +3231,37 @@ define_print! { // The args don't contain the self ty (as it has been erased) but the corresp. // generics do as the trait always has a self ty param. We need to offset. let args = &self.args[p.tcx().generics_of(self.def_id).parent_count - 1..]; - p!(path_generic_args(|p| write!(p, "{name}"), args), " = ", print(self.term)) + p.path_generic_args(|p| write!(p, "{name}"), args)?; + write!(p, " = ")?; + self.term.print(p)?; } ty::ProjectionPredicate<'tcx> { - p!(print(self.projection_term), " == "); + self.projection_term.print(p)?; + write!(p, " == ")?; p.reset_type_limit(); - p!(print(self.term)) + self.term.print(p)?; } ty::SubtypePredicate<'tcx> { - p!(print(self.a), " <: "); + self.a.print(p)?; + write!(p, " <: ")?; p.reset_type_limit(); - p!(print(self.b)) + self.b.print(p)?; } ty::CoercePredicate<'tcx> { - p!(print(self.a), " -> "); + self.a.print(p)?; + write!(p, " -> ")?; p.reset_type_limit(); - p!(print(self.b)) + self.b.print(p)?; } ty::NormalizesTo<'tcx> { - p!(print(self.alias), " normalizes-to "); + self.alias.print(p)?; + write!(p, " normalizes-to ")?; p.reset_type_limit(); - p!(print(self.term)) + self.term.print(p)?; } } @@ -3264,11 +3269,13 @@ define_print_and_forward_display! { (self, p): &'tcx ty::List> { - p!("{{", comma_sep(self.iter()), "}}") + write!(p, "{{")?; + p.comma_sep(self.iter())?; + write!(p, "}}")?; } TraitRefPrintOnlyTraitPath<'tcx> { - p!(print_def_path(self.0.def_id, self.0.args)); + p.print_def_path(self.0.def_id, self.0.args)?; } TraitRefPrintSugared<'tcx> { @@ -3276,83 +3283,84 @@ define_print_and_forward_display! 
{ && p.tcx().trait_def(self.0.def_id).paren_sugar && let ty::Tuple(args) = self.0.args.type_at(1).kind() { - p!(write("{}", p.tcx().item_name(self.0.def_id)), "("); + write!(p, "{}(", p.tcx().item_name(self.0.def_id))?; for (i, arg) in args.iter().enumerate() { if i > 0 { - p!(", "); + write!(p, ", ")?; } - p!(print(arg)); + arg.print(p)?; } - p!(")"); + write!(p, ")")?; } else { - p!(print_def_path(self.0.def_id, self.0.args)); + p.print_def_path(self.0.def_id, self.0.args)?; } } TraitRefPrintOnlyTraitName<'tcx> { - p!(print_def_path(self.0.def_id, &[])); + p.print_def_path(self.0.def_id, &[])?; } TraitPredPrintModifiersAndPath<'tcx> { if let ty::PredicatePolarity::Negative = self.0.polarity { - p!("!") + write!(p, "!")?; } - p!(print(self.0.trait_ref.print_trait_sugared())); + self.0.trait_ref.print_trait_sugared().print(p)?; } TraitPredPrintWithBoundConstness<'tcx> { - p!(print(self.0.trait_ref.self_ty()), ": "); + self.0.trait_ref.self_ty().print(p)?; + write!(p, ": ")?; if let Some(constness) = self.1 { - p!(pretty_print_bound_constness(constness)); + p.pretty_print_bound_constness(constness)?; } if let ty::PredicatePolarity::Negative = self.0.polarity { - p!("!"); + write!(p, "!")?; } - p!(print(self.0.trait_ref.print_trait_sugared())) + self.0.trait_ref.print_trait_sugared().print(p)?; } PrintClosureAsImpl<'tcx> { - p!(pretty_closure_as_impl(self.closure)) + p.pretty_closure_as_impl(self.closure)?; } ty::ParamTy { - p!(write("{}", self.name)) + write!(p, "{}", self.name)?; } ty::PlaceholderType { match self.bound.kind { - ty::BoundTyKind::Anon => p!(write("{self:?}")), + ty::BoundTyKind::Anon => write!(p, "{self:?}")?, ty::BoundTyKind::Param(def_id) => match p.should_print_verbose() { - true => p!(write("{self:?}")), - false => p!(write("{}", p.tcx().item_name(def_id))), + true => write!(p, "{self:?}")?, + false => write!(p, "{}", p.tcx().item_name(def_id))?, }, } } ty::ParamConst { - p!(write("{}", self.name)) + write!(p, "{}", self.name)?; } ty::Term<'tcx> { match self.kind() { - ty::TermKind::Ty(ty) => p!(print(ty)), - ty::TermKind::Const(c) => p!(print(c)), + ty::TermKind::Ty(ty) => ty.print(p)?, + ty::TermKind::Const(c) => c.print(p)?, } } ty::Predicate<'tcx> { - p!(print(self.kind())) + self.kind().print(p)?; } ty::Clause<'tcx> { - p!(print(self.kind())) + self.kind().print(p)?; } GenericArg<'tcx> { match self.kind() { - GenericArgKind::Lifetime(lt) => p!(print(lt)), - GenericArgKind::Type(ty) => p!(print(ty)), - GenericArgKind::Const(ct) => p!(print(ct)), + GenericArgKind::Lifetime(lt) => lt.print(p)?, + GenericArgKind::Type(ty) => ty.print(p)?, + GenericArgKind::Const(ct) => ct.print(p)?, } } } From 2434d8cecfb228093277c1b4506ae47db1972de9 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 1 Aug 2025 19:01:32 +1000 Subject: [PATCH 088/118] Remove unused arg from `path_append_impl`. None of the impls use it. 
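
In other words, the trait method loses its `&DisambiguatedDefPathData` parameter and the single caller in `ty/print/mod.rs` stops threading `&key.disambiguated_data` through. A rough sketch of the resulting shape is below; the trait, printer, and error type are simplified stand-ins, not the real `Printer`, `FmtPrinter`, or `PrintError`.

```rust
// Illustrative stand-ins: `Printer`/`FmtPrinter` are simplified versions of
// the real trait/types, and `String` replaces `PrintError`.
trait Printer: Sized {
    // Before this patch the method also took
    // `_disambiguated_data: &DisambiguatedDefPathData`,
    // which no implementation ever read.
    fn path_append_impl(
        &mut self,
        print_prefix: impl FnOnce(&mut Self) -> Result<(), String>,
    ) -> Result<(), String>;
}

struct FmtPrinter {
    out: String,
}

impl Printer for FmtPrinter {
    fn path_append_impl(
        &mut self,
        print_prefix: impl FnOnce(&mut Self) -> Result<(), String>,
    ) -> Result<(), String> {
        // Same structure as the real impls: print the prefix, then the
        // `<impl ...>` segment, with no need for the removed argument.
        print_prefix(&mut *self)?;
        self.out.push_str("::<impl at ...>");
        Ok(())
    }
}

fn main() -> Result<(), String> {
    let mut p = FmtPrinter { out: String::new() };
    // The call site no longer passes a `&key.disambiguated_data`-style argument.
    p.path_append_impl(|p| {
        p.out.push_str("my_crate");
        Ok(())
    })?;
    assert_eq!(p.out, "my_crate::<impl at ...>");
    Ok(())
}
```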
--- compiler/rustc_const_eval/src/util/type_name.rs | 1 - compiler/rustc_lint/src/context.rs | 1 - compiler/rustc_middle/src/ty/print/mod.rs | 8 +------- compiler/rustc_middle/src/ty/print/pretty.rs | 1 - compiler/rustc_symbol_mangling/src/legacy.rs | 1 - compiler/rustc_symbol_mangling/src/v0.rs | 1 - .../src/error_reporting/infer/mod.rs | 1 - 7 files changed, 1 insertion(+), 13 deletions(-) diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 400ba23ae5f9b..e6b9759819f1b 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -89,7 +89,6 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { fn path_append_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, ) -> Result<(), PrintError> { diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index 7e35d4d142bd8..11181d10af5ee 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -803,7 +803,6 @@ impl<'tcx> LateContext<'tcx> { fn path_append_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, ) -> Result<(), PrintError> { diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index 1fee9d945f65c..8a125c7fe2840 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -88,7 +88,6 @@ pub trait Printer<'tcx>: Sized { fn path_append_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, ) -> Result<(), PrintError>; @@ -236,12 +235,7 @@ pub trait Printer<'tcx>: Sized { // If the impl is not co-located with either self-type or // trait-type, then fallback to a format that identifies // the module more clearly. - self.path_append_impl( - |p| p.print_def_path(parent_def_id, &[]), - &key.disambiguated_data, - self_ty, - impl_trait_ref, - ) + self.path_append_impl(|p| p.print_def_path(parent_def_id, &[]), self_ty, impl_trait_ref) } else { // Otherwise, try to give a good form that would be valid language // syntax. Preferably using associated item notation. 
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 538179245c426..033f1e6cd0641 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2340,7 +2340,6 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { fn path_append_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, ) -> Result<(), PrintError> { diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index d1834abb32b07..aa8292c050440 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -332,7 +332,6 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { fn path_append_impl( &mut self, print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, ) -> Result<(), PrintError> { diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index a34d8b4436ed2..c2458ae814b71 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -846,7 +846,6 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { fn path_append_impl( &mut self, _: impl FnOnce(&mut Self) -> Result<(), PrintError>, - _: &DisambiguatedDefPathData, _: Ty<'tcx>, _: Option>, ) -> Result<(), PrintError> { diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 8daa5d5347a0e..ed8229154a9bf 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -269,7 +269,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn path_append_impl( &mut self, _print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, - _disambiguated_data: &DisambiguatedDefPathData, _self_ty: Ty<'tcx>, _trait_ref: Option>, ) -> Result<(), PrintError> { From 277cf46d3e4aecbaa393507c0c9612da5fc69c37 Mon Sep 17 00:00:00 2001 From: Hmikihiro <34ttrweoewiwe28@gmail.com> Date: Sun, 3 Aug 2025 19:38:29 +0900 Subject: [PATCH 089/118] Remove unused functions from edit_in_place --- .../crates/syntax/src/ast/edit_in_place.rs | 163 +----------------- 1 file changed, 2 insertions(+), 161 deletions(-) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index f01ac081c8bdd..b50ce6442432d 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -9,11 +9,11 @@ use crate::{ SyntaxKind::{ATTR, COMMENT, WHITESPACE}, SyntaxNode, SyntaxToken, algo::{self, neighbor}, - ast::{self, HasGenericArgs, HasGenericParams, edit::IndentLevel, make}, + ast::{self, HasGenericParams, edit::IndentLevel, make}, ted::{self, Position}, }; -use super::{GenericParam, HasArgList, HasName}; +use super::{GenericParam, HasName}; pub trait GenericParamsOwnerEdit: ast::HasGenericParams { fn get_or_create_generic_param_list(&self) -> ast::GenericParamList; @@ -419,34 +419,6 @@ impl Removable for ast::TypeBoundList { } } -impl ast::PathSegment { - pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList { - if self.generic_arg_list().is_none() { - let arg_list = 
make::generic_arg_list(empty()).clone_for_update(); - ted::append_child(self.syntax(), arg_list.syntax()); - } - self.generic_arg_list().unwrap() - } -} - -impl ast::MethodCallExpr { - pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList { - if self.generic_arg_list().is_none() { - let generic_arg_list = make::turbofish_generic_arg_list(empty()).clone_for_update(); - - if let Some(arg_list) = self.arg_list() { - ted::insert_raw( - ted::Position::before(arg_list.syntax()), - generic_arg_list.syntax(), - ); - } else { - ted::append_child(self.syntax(), generic_arg_list.syntax()); - } - } - self.generic_arg_list().unwrap() - } -} - impl Removable for ast::UseTree { fn remove(&self) { for dir in [Direction::Next, Direction::Prev] { @@ -677,106 +649,6 @@ impl ast::AssocItemList { ]; ted::insert_all(position, elements); } - - /// Adds a new associated item at the start of the associated item list. - /// - /// Attention! This function does align the first line of `item` with respect to `self`, - /// but it does _not_ change indentation of other lines (if any). - pub fn add_item_at_start(&self, item: ast::AssocItem) { - match self.assoc_items().next() { - Some(first_item) => { - let indent = IndentLevel::from_node(first_item.syntax()); - let before = Position::before(first_item.syntax()); - - ted::insert_all( - before, - vec![ - item.syntax().clone().into(), - make::tokens::whitespace(&format!("\n\n{indent}")).into(), - ], - ) - } - None => { - let (indent, position, whitespace) = match self.l_curly_token() { - Some(l_curly) => { - normalize_ws_between_braces(self.syntax()); - (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") - } - None => (IndentLevel::single(), Position::first_child_of(self.syntax()), ""), - }; - - let mut elements = vec![]; - - // Avoid pushing an empty whitespace token - if !indent.is_zero() || !whitespace.is_empty() { - elements.push(make::tokens::whitespace(&format!("{whitespace}{indent}")).into()) - } - elements.push(item.syntax().clone().into()); - - ted::insert_all(position, elements) - } - }; - } -} - -impl ast::Fn { - pub fn get_or_create_body(&self) -> ast::BlockExpr { - if self.body().is_none() { - let body = make::ext::empty_block_expr().clone_for_update(); - match self.semicolon_token() { - Some(semi) => { - ted::replace(semi, body.syntax()); - ted::insert(Position::before(body.syntax), make::tokens::single_space()); - } - None => ted::append_child(self.syntax(), body.syntax()), - } - } - self.body().unwrap() - } -} - -impl ast::LetStmt { - pub fn set_ty(&self, ty: Option) { - match ty { - None => { - if let Some(colon_token) = self.colon_token() { - ted::remove(colon_token); - } - - if let Some(existing_ty) = self.ty() { - if let Some(sibling) = existing_ty.syntax().prev_sibling_or_token() - && sibling.kind() == SyntaxKind::WHITESPACE - { - ted::remove(sibling); - } - - ted::remove(existing_ty.syntax()); - } - - // Remove any trailing ws - if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) - { - last.detach(); - } - } - Some(new_ty) => { - if self.colon_token().is_none() { - ted::insert_raw( - Position::after( - self.pat().expect("let stmt should have a pattern").syntax(), - ), - make::token(T![:]), - ); - } - - if let Some(old_ty) = self.ty() { - ted::replace(old_ty.syntax(), new_ty.syntax()); - } else { - ted::insert(Position::after(self.colon_token().unwrap()), new_ty.syntax()); - } - } - } - } } impl ast::RecordExprFieldList { @@ -1091,35 +963,4 @@ mod tests { check("let a @ ()", "let a", 
None); check("let a @ ", "let a", None); } - - #[test] - fn test_let_stmt_set_ty() { - #[track_caller] - fn check(before: &str, expected: &str, ty: Option<ast::Type>) { - let ty = ty.map(|it| it.clone_for_update()); - - let let_stmt = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {before} }}")); - let_stmt.set_ty(ty); - - let after = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {expected} }}")); - assert_eq!(let_stmt.to_string(), after.to_string(), "{let_stmt:#?}\n!=\n{after:#?}"); - } - - // adding - check("let a;", "let a: ();", Some(make::ty_tuple([]))); - // no semicolon due to it being eaten during error recovery - check("let a:", "let a: ()", Some(make::ty_tuple([]))); - - // replacing - check("let a: u8;", "let a: ();", Some(make::ty_tuple([]))); - check("let a: u8 = 3;", "let a: () = 3;", Some(make::ty_tuple([]))); - check("let a: = 3;", "let a: () = 3;", Some(make::ty_tuple([]))); - - // removing - check("let a: u8;", "let a;", None); - check("let a:;", "let a;", None); - - check("let a: u8 = 3;", "let a = 3;", None); - check("let a: = 3;", "let a = 3;", None); - } } From 754654d5a98a81a2f4f033a1cf4b2f9e7bb55f06 Mon Sep 17 00:00:00 2001 From: Kivooeo Date: Sun, 3 Aug 2025 17:59:37 +0500 Subject: [PATCH 090/118] rename rust_panic_without_hook --- library/std/src/panic.rs | 2 +- library/std/src/panicking.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index 234fb284a5904..913ef72f67464 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -388,7 +388,7 @@ pub fn catch_unwind<F: FnOnce() -> R + UnwindSafe, R>(f: F) -> Result<R> { /// ``` #[stable(feature = "resume_unwind", since = "1.9.0")] pub fn resume_unwind(payload: Box<dyn Any + Send>) -> ! { - panicking::rust_panic_without_hook(payload) + panicking::resume_unwind(payload) } /// Makes all future panics abort directly without running the panic hook or unwinding. diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs index 7873049d20bfd..bb399fc17734b 100644 --- a/library/std/src/panicking.rs +++ b/library/std/src/panicking.rs @@ -861,7 +861,7 @@ fn rust_panic_with_hook( /// This is the entry point for `resume_unwind`. /// It just forwards the payload to the panic runtime. #[cfg_attr(feature = "panic_immediate_abort", inline)] -pub fn rust_panic_without_hook(payload: Box<dyn Any + Send>) -> ! { +pub fn resume_unwind(payload: Box<dyn Any + Send>) -> ! { panic_count::increase(false); struct RewrapBox(Box<dyn Any + Send>); From 035ff85042ed1b61e98346229c4aa059a3c87c69 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Sun, 3 Aug 2025 11:58:13 -0400 Subject: [PATCH 091/118] Fix intcast to use the is_signed parameter --- src/builder.rs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 3cd464b61e142..dac3c3dcbb1bb 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1278,11 +1278,19 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { fn intcast( &mut self, - value: RValue<'gcc>, + mut value: RValue<'gcc>, dest_typ: Type<'gcc>, - _is_signed: bool, + is_signed: bool, ) -> RValue<'gcc> { - // NOTE: is_signed is for value, not dest_typ.
+ let value_type = value.get_type(); + if is_signed && !value_type.is_signed(self.cx) { + let signed_type = value_type.to_signed(self.cx); + value = self.gcc_int_cast(value, signed_type); + } else if !is_signed && value_type.is_signed(self.cx) { + let unsigned_type = value_type.to_unsigned(self.cx); + value = self.gcc_int_cast(value, unsigned_type); + } + self.gcc_int_cast(value, dest_typ) } From 7d78968bd053eb7811126b2bad991579fb78cc3a Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Fri, 1 Aug 2025 00:23:36 +0900 Subject: [PATCH 092/118] fix: Error on illegal `[const]`s inside blocks within legal positions --- compiler/rustc_ast_passes/messages.ftl | 4 ++ .../rustc_ast_passes/src/ast_validation.rs | 47 ++++++++++++------- compiler/rustc_ast_passes/src/errors.rs | 20 ++++++++ .../conditionally-const-in-anon-const.rs | 28 +++++++++++ .../conditionally-const-in-anon-const.stderr | 32 +++++++++++++ .../conditionally-const-in-struct-args.rs | 21 --------- .../conditionally-const-invalid-places.stderr | 30 ++++++++++-- 7 files changed, 140 insertions(+), 42 deletions(-) create mode 100644 tests/ui/traits/const-traits/conditionally-const-in-anon-const.rs create mode 100644 tests/ui/traits/const-traits/conditionally-const-in-anon-const.stderr delete mode 100644 tests/ui/traits/const-traits/conditionally-const-in-struct-args.rs diff --git a/compiler/rustc_ast_passes/messages.ftl b/compiler/rustc_ast_passes/messages.ftl index af93d55c89826..42f3569f0f1e1 100644 --- a/compiler/rustc_ast_passes/messages.ftl +++ b/compiler/rustc_ast_passes/messages.ftl @@ -241,6 +241,10 @@ ast_passes_tilde_const_disallowed = `[const]` is not allowed here .trait_assoc_ty = associated types in non-`const` traits cannot have `[const]` trait bounds .trait_impl_assoc_ty = associated types in non-const impls cannot have `[const]` trait bounds .inherent_assoc_ty = inherent associated types cannot have `[const]` trait bounds + .struct = structs cannot have `[const]` trait bounds + .enum = enums cannot have `[const]` trait bounds + .union = unions cannot have `[const]` trait bounds + .anon_const = anonymous constants cannot have `[const]` trait bounds .object = trait objects cannot have `[const]` trait bounds .item = this item cannot have `[const]` trait bounds diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index 895a457ec1d56..ae482ceb9b72b 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -1124,7 +1124,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ); } } - visit::walk_item(self, item) + self.with_tilde_const(Some(TildeConstReason::Enum { span: item.span }), |this| { + visit::walk_item(this, item) + }); } ItemKind::Trait(box Trait { constness, @@ -1175,26 +1177,32 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } visit::walk_item(self, item) } - ItemKind::Struct(ident, generics, vdata) => match vdata { - VariantData::Struct { fields, .. } => { - self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); - self.visit_generics(generics); - walk_list!(self, visit_field_def, fields); - } - _ => visit::walk_item(self, item), - }, + ItemKind::Struct(ident, generics, vdata) => { + self.with_tilde_const(Some(TildeConstReason::Struct { span: item.span }), |this| { + match vdata { + VariantData::Struct { fields, .. 
} => { + this.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); + this.visit_generics(generics); + walk_list!(this, visit_field_def, fields); + } + _ => visit::walk_item(this, item), + } + }) + } ItemKind::Union(ident, generics, vdata) => { if vdata.fields().is_empty() { self.dcx().emit_err(errors::FieldlessUnion { span: item.span }); } - match vdata { - VariantData::Struct { fields, .. } => { - self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); - self.visit_generics(generics); - walk_list!(self, visit_field_def, fields); + self.with_tilde_const(Some(TildeConstReason::Union { span: item.span }), |this| { + match vdata { + VariantData::Struct { fields, .. } => { + this.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); + this.visit_generics(generics); + walk_list!(this, visit_field_def, fields); + } + _ => visit::walk_item(this, item), } - _ => visit::walk_item(self, item), - } + }); } ItemKind::Const(box ConstItem { defaultness, expr, .. }) => { self.check_defaultness(item.span, *defaultness); @@ -1623,6 +1631,13 @@ impl<'a> Visitor<'a> for AstValidator<'a> { _ => self.with_in_trait_impl(None, |this| visit::walk_assoc_item(this, item, ctxt)), } } + + fn visit_anon_const(&mut self, anon_const: &'a AnonConst) { + self.with_tilde_const( + Some(TildeConstReason::AnonConst { span: anon_const.value.span }), + |this| visit::walk_anon_const(this, anon_const), + ) + } } /// When encountering an equality constraint in a `where` clause, emit an error. If the code seems diff --git a/compiler/rustc_ast_passes/src/errors.rs b/compiler/rustc_ast_passes/src/errors.rs index fd4b2528541e3..8b5873a3ef37a 100644 --- a/compiler/rustc_ast_passes/src/errors.rs +++ b/compiler/rustc_ast_passes/src/errors.rs @@ -623,6 +623,26 @@ pub(crate) enum TildeConstReason { #[primary_span] span: Span, }, + #[note(ast_passes_struct)] + Struct { + #[primary_span] + span: Span, + }, + #[note(ast_passes_enum)] + Enum { + #[primary_span] + span: Span, + }, + #[note(ast_passes_union)] + Union { + #[primary_span] + span: Span, + }, + #[note(ast_passes_anon_const)] + AnonConst { + #[primary_span] + span: Span, + }, #[note(ast_passes_object)] TraitObject, #[note(ast_passes_item)] diff --git a/tests/ui/traits/const-traits/conditionally-const-in-anon-const.rs b/tests/ui/traits/const-traits/conditionally-const-in-anon-const.rs new file mode 100644 index 0000000000000..5aebcceb7c75d --- /dev/null +++ b/tests/ui/traits/const-traits/conditionally-const-in-anon-const.rs @@ -0,0 +1,28 @@ +#![feature(const_trait_impl, impl_trait_in_bindings)] + +struct S; +#[const_trait] +trait Trait {} + +impl const Trait<0> for () {} + +const fn f< + T: Trait< + { + const fn g>() {} + + struct I>(U); + //~^ ERROR `[const]` is not allowed here + + let x: &impl [const] Trait<0> = &(); + //~^ ERROR `[const]` is not allowed here + + 0 + }, + >, +>(x: &T) { + // Should be allowed here + let y: &impl [const] Trait<0> = x; +} + +pub fn main() {} diff --git a/tests/ui/traits/const-traits/conditionally-const-in-anon-const.stderr b/tests/ui/traits/const-traits/conditionally-const-in-anon-const.stderr new file mode 100644 index 0000000000000..c6be249b95a22 --- /dev/null +++ b/tests/ui/traits/const-traits/conditionally-const-in-anon-const.stderr @@ -0,0 +1,32 @@ +error: `[const]` is not allowed here + --> $DIR/conditionally-const-in-anon-const.rs:14:25 + | +LL | struct I>(U); + | ^^^^^^^ + | +note: structs cannot have `[const]` trait bounds + --> $DIR/conditionally-const-in-anon-const.rs:14:13 + | +LL | struct I>(U); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+ +error: `[const]` is not allowed here + --> $DIR/conditionally-const-in-anon-const.rs:17:26 + | +LL | let x: &impl [const] Trait<0> = &(); + | ^^^^^^^ + | +note: anonymous constants cannot have `[const]` trait bounds + --> $DIR/conditionally-const-in-anon-const.rs:11:9 + | +LL | / { +LL | | const fn g>() {} +LL | | +LL | | struct I>(U); +... | +LL | | 0 +LL | | }, + | |_________^ + +error: aborting due to 2 previous errors + diff --git a/tests/ui/traits/const-traits/conditionally-const-in-struct-args.rs b/tests/ui/traits/const-traits/conditionally-const-in-struct-args.rs deleted file mode 100644 index 0c644694585a6..0000000000000 --- a/tests/ui/traits/const-traits/conditionally-const-in-struct-args.rs +++ /dev/null @@ -1,21 +0,0 @@ -//@ compile-flags: -Znext-solver -//@ known-bug: #132067 -//@ check-pass - -#![feature(const_trait_impl)] - -struct S; -#[const_trait] -trait Trait {} - -const fn f< - T: Trait< - { - struct I>(U); - 0 - }, - >, ->() { -} - -pub fn main() {} diff --git a/tests/ui/traits/const-traits/conditionally-const-invalid-places.stderr b/tests/ui/traits/const-traits/conditionally-const-invalid-places.stderr index 010b158464361..5c3bb2369675e 100644 --- a/tests/ui/traits/const-traits/conditionally-const-invalid-places.stderr +++ b/tests/ui/traits/const-traits/conditionally-const-invalid-places.stderr @@ -16,7 +16,11 @@ error: `[const]` is not allowed here LL | struct Struct { field: T } | ^^^^^^^ | - = note: this item cannot have `[const]` trait bounds +note: structs cannot have `[const]` trait bounds + --> $DIR/conditionally-const-invalid-places.rs:9:1 + | +LL | struct Struct { field: T } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `[const]` is not allowed here --> $DIR/conditionally-const-invalid-places.rs:10:23 @@ -24,7 +28,11 @@ error: `[const]` is not allowed here LL | struct TupleStruct(T); | ^^^^^^^ | - = note: this item cannot have `[const]` trait bounds +note: structs cannot have `[const]` trait bounds + --> $DIR/conditionally-const-invalid-places.rs:10:1 + | +LL | struct TupleStruct(T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `[const]` is not allowed here --> $DIR/conditionally-const-invalid-places.rs:11:22 @@ -32,7 +40,11 @@ error: `[const]` is not allowed here LL | struct UnitStruct; | ^^^^^^^ | - = note: this item cannot have `[const]` trait bounds +note: structs cannot have `[const]` trait bounds + --> $DIR/conditionally-const-invalid-places.rs:11:1 + | +LL | struct UnitStruct; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `[const]` is not allowed here --> $DIR/conditionally-const-invalid-places.rs:14:14 @@ -40,7 +52,11 @@ error: `[const]` is not allowed here LL | enum Enum { Variant(T) } | ^^^^^^^ | - = note: this item cannot have `[const]` trait bounds +note: enums cannot have `[const]` trait bounds + --> $DIR/conditionally-const-invalid-places.rs:14:1 + | +LL | enum Enum { Variant(T) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `[const]` is not allowed here --> $DIR/conditionally-const-invalid-places.rs:16:16 @@ -48,7 +64,11 @@ error: `[const]` is not allowed here LL | union Union { field: T } | ^^^^^^^ | - = note: this item cannot have `[const]` trait bounds +note: unions cannot have `[const]` trait bounds + --> $DIR/conditionally-const-invalid-places.rs:16:1 + | +LL | union Union { field: T } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `[const]` is not allowed here --> $DIR/conditionally-const-invalid-places.rs:19:14 From 681651350f708260428b7b82621583f98c4d042f Mon Sep 17 00:00:00 2001 From: Antoni Boucher 
Date: Sun, 3 Aug 2025 13:31:24 -0400 Subject: [PATCH 093/118] Only use bitcast in Builder::ret for non-native integers --- src/builder.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index dac3c3dcbb1bb..751f0ba24301c 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -539,9 +539,15 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { fn ret(&mut self, mut value: RValue<'gcc>) { let expected_return_type = self.current_func().get_return_type(); - if !expected_return_type.is_compatible_with(value.get_type()) { - // NOTE: due to opaque pointers now being used, we need to bitcast here. - value = self.context.new_bitcast(self.location, value, expected_return_type); + let value_type = value.get_type(); + if !expected_return_type.is_compatible_with(value_type) { + // NOTE: due to opaque pointers now being used, we need to (bit)cast here. + if self.is_native_int_type(value_type) && self.is_native_int_type(expected_return_type) + { + value = self.context.new_cast(self.location, value, expected_return_type); + } else { + value = self.context.new_bitcast(self.location, value, expected_return_type); + } } self.llbb().end_with_return(self.location, value); } From eee28138b89902426815a0d9dd96800c686f5003 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Sun, 3 Aug 2025 12:47:11 -0700 Subject: [PATCH 094/118] Use `as_array` in PartialEq for arrays --- library/core/src/array/equality.rs | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/library/core/src/array/equality.rs b/library/core/src/array/equality.rs index bb668d2a67309..1ad2cca64a347 100644 --- a/library/core/src/array/equality.rs +++ b/library/core/src/array/equality.rs @@ -22,18 +22,16 @@ where { #[inline] fn eq(&self, other: &[U]) -> bool { - let b: Result<&[U; N], _> = other.try_into(); - match b { - Ok(b) => *self == *b, - Err(_) => false, + match other.as_array::() { + Some(b) => *self == *b, + None => false, } } #[inline] fn ne(&self, other: &[U]) -> bool { - let b: Result<&[U; N], _> = other.try_into(); - match b { - Ok(b) => *self != *b, - Err(_) => true, + match other.as_array::() { + Some(b) => *self != *b, + None => true, } } } @@ -45,18 +43,16 @@ where { #[inline] fn eq(&self, other: &[U; N]) -> bool { - let b: Result<&[T; N], _> = self.try_into(); - match b { - Ok(b) => *b == *other, - Err(_) => false, + match self.as_array::() { + Some(b) => *b == *other, + None => false, } } #[inline] fn ne(&self, other: &[U; N]) -> bool { - let b: Result<&[T; N], _> = self.try_into(); - match b { - Ok(b) => *b != *other, - Err(_) => true, + match self.as_array::() { + Some(b) => *b != *other, + None => true, } } } From f92934f43ac34d65a0c39e589d330f27a2f45574 Mon Sep 17 00:00:00 2001 From: Jonathan Brouwer Date: Sun, 3 Aug 2025 21:48:23 +0200 Subject: [PATCH 095/118] Remove `SHOULD_EMIT_LINTS` in favor of `should_emit` --- compiler/rustc_attr_parsing/src/context.rs | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs index 767d19bd23406..91aae71cb82a6 100644 --- a/compiler/rustc_attr_parsing/src/context.rs +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -224,7 +224,6 @@ mod private { #[allow(private_interfaces)] pub trait Stage: Sized + 'static + Sealed { type Id: Copy; - const SHOULD_EMIT_LINTS: bool; fn parsers() -> &'static GroupType; @@ -233,13 +232,14 @@ pub trait Stage: Sized + 
'static + Sealed { sess: &'sess Session, diag: impl for<'x> Diagnostic<'x>, ) -> ErrorGuaranteed; + + fn should_emit(&self) -> ShouldEmit; } // allow because it's a sealed trait #[allow(private_interfaces)] impl Stage for Early { type Id = NodeId; - const SHOULD_EMIT_LINTS: bool = false; fn parsers() -> &'static GroupType { &early::ATTRIBUTE_PARSERS @@ -255,13 +255,16 @@ impl Stage for Early { sess.dcx().create_err(diag).delay_as_bug() } } + + fn should_emit(&self) -> ShouldEmit { + self.emit_errors + } } // allow because it's a sealed trait #[allow(private_interfaces)] impl Stage for Late { type Id = HirId; - const SHOULD_EMIT_LINTS: bool = true; fn parsers() -> &'static GroupType { &late::ATTRIBUTE_PARSERS @@ -273,6 +276,10 @@ impl Stage for Late { ) -> ErrorGuaranteed { tcx.dcx().emit_err(diag) } + + fn should_emit(&self) -> ShouldEmit { + ShouldEmit::ErrorsAndLints + } } /// used when parsing attributes for miscellaneous things *before* ast lowering @@ -311,7 +318,7 @@ impl<'f, 'sess: 'f, S: Stage> SharedContext<'f, 'sess, S> { /// must be delayed until after HIR is built. This method will take care of the details of /// that. pub(crate) fn emit_lint(&mut self, lint: AttributeLintKind, span: Span) { - if !S::SHOULD_EMIT_LINTS { + if !self.stage.should_emit().should_emit() { return; } let id = self.target_id; From 566c9aeb66977f947f7b5a03d7e734c96625ca7f Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Sun, 3 Aug 2025 16:21:58 -0400 Subject: [PATCH 096/118] Fix simd_funnel_shift --- src/intrinsic/simd.rs | 35 +++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/src/intrinsic/simd.rs b/src/intrinsic/simd.rs index 6748e1a412579..fdc15d580effc 100644 --- a/src/intrinsic/simd.rs +++ b/src/intrinsic/simd.rs @@ -1465,27 +1465,26 @@ fn simd_funnel_shift<'a, 'gcc, 'tcx>( shift: RValue<'gcc>, shift_left: bool, ) -> RValue<'gcc> { + use crate::common::SignType; + let a_type = a.get_type(); let vector_type = a_type.unqualified().dyncast_vector().expect("vector type"); let num_units = vector_type.get_num_units(); let elem_type = vector_type.get_element_type(); - let (new_int_type, int_shift_val, int_mask) = if elem_type.is_compatible_with(bx.u8_type) { + let (new_int_type, int_shift_val, int_mask) = if elem_type.is_compatible_with(bx.u8_type) + || elem_type.is_compatible_with(bx.i8_type) + { (bx.u16_type, 8, u8::MAX as u64) - } else if elem_type.is_compatible_with(bx.u16_type) { + } else if elem_type.is_compatible_with(bx.u16_type) || elem_type.is_compatible_with(bx.i16_type) + { (bx.u32_type, 16, u16::MAX as u64) - } else if elem_type.is_compatible_with(bx.u32_type) { + } else if elem_type.is_compatible_with(bx.u32_type) || elem_type.is_compatible_with(bx.i32_type) + { (bx.u64_type, 32, u32::MAX as u64) - } else if elem_type.is_compatible_with(bx.u64_type) { + } else if elem_type.is_compatible_with(bx.u64_type) || elem_type.is_compatible_with(bx.i64_type) + { (bx.u128_type, 64, u64::MAX) - } else if elem_type.is_compatible_with(bx.i8_type) { - (bx.i16_type, 8, u8::MAX as u64) - } else if elem_type.is_compatible_with(bx.i16_type) { - (bx.i32_type, 16, u16::MAX as u64) - } else if elem_type.is_compatible_with(bx.i32_type) { - (bx.i64_type, 32, u32::MAX as u64) - } else if elem_type.is_compatible_with(bx.i64_type) { - (bx.i128_type, 64, u64::MAX) } else { unimplemented!("funnel shift on {:?}", elem_type); }; @@ -1493,21 +1492,25 @@ fn simd_funnel_shift<'a, 'gcc, 'tcx>( let int_mask = bx.context.new_rvalue_from_long(new_int_type, int_mask as i64); let 
int_shift_val = bx.context.new_rvalue_from_int(new_int_type, int_shift_val); let mut elements = vec![]; + let unsigned_type = elem_type.to_unsigned(bx); for i in 0..num_units { let index = bx.context.new_rvalue_from_int(bx.int_type, i as i32); let a_val = bx.context.new_vector_access(None, a, index).to_rvalue(); - let a_val = bx.context.new_cast(None, a_val, new_int_type); + let a_val = bx.context.new_bitcast(None, a_val, unsigned_type); + // TODO: we probably need to use gcc_int_cast instead. + let a_val = bx.gcc_int_cast(a_val, new_int_type); let b_val = bx.context.new_vector_access(None, b, index).to_rvalue(); - let b_val = bx.context.new_cast(None, b_val, new_int_type); + let b_val = bx.context.new_bitcast(None, b_val, unsigned_type); + let b_val = bx.gcc_int_cast(b_val, new_int_type); let shift_val = bx.context.new_vector_access(None, shift, index).to_rvalue(); - let shift_val = bx.context.new_cast(None, shift_val, new_int_type); + let shift_val = bx.gcc_int_cast(shift_val, new_int_type); let mut val = a_val << int_shift_val | b_val; if shift_left { val = (val << shift_val) >> int_shift_val; } else { val = (val >> shift_val) & int_mask; } - let val = bx.context.new_cast(None, val, elem_type); + let val = bx.gcc_int_cast(val, elem_type); elements.push(val); } bx.context.new_rvalue_from_vector(None, a_type, &elements) From cf3865f7a540706cb4ae1537b8f37d136e47da4e Mon Sep 17 00:00:00 2001 From: Connor Tsui Date: Sun, 3 Aug 2025 22:28:04 +0200 Subject: [PATCH 097/118] fix broken doc section link in `poison.rs` --- library/std/src/sync/poison.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/std/src/sync/poison.rs b/library/std/src/sync/poison.rs index d5adc9e29b505..31889dcc10fad 100644 --- a/library/std/src/sync/poison.rs +++ b/library/std/src/sync/poison.rs @@ -11,7 +11,7 @@ //! //! The specifics of how this "poisoned" state affects other threads and whether //! the panics are recognized reliably or on a best-effort basis depend on the -//! primitive. See [#Overview] below. +//! primitive. See [Overview](#overview) below. //! //! For the alternative implementations that do not employ poisoning, //! see [`std::sync::nonpoison`]. From 37922fc24cd785fff543be0c31b5c8529d7339f0 Mon Sep 17 00:00:00 2001 From: Connor Tsui Date: Sun, 3 Aug 2025 22:28:49 +0200 Subject: [PATCH 098/118] add poisoning documentation to `LazyCell` --- library/core/src/cell/lazy.rs | 61 +++++++++++++++++++++++++++++++++-- 1 file changed, 59 insertions(+), 2 deletions(-) diff --git a/library/core/src/cell/lazy.rs b/library/core/src/cell/lazy.rs index 1758e84ad7cdf..a1bd4c8571706 100644 --- a/library/core/src/cell/lazy.rs +++ b/library/core/src/cell/lazy.rs @@ -15,6 +15,22 @@ enum State { /// /// [`std::sync::LazyLock`]: ../../std/sync/struct.LazyLock.html /// +/// # Poisoning +/// +/// If the initialization closure passed to [`LazyCell::new`] panics, the cell will be poisoned. +/// Once the cell is poisoned, any threads that attempt to access this cell (via a dereference +/// or via an explicit call to [`force()`]) will panic. +/// +/// This concept is similar to that of poisoning in the [`std::sync::poison`] module. A key +/// difference, however, is that poisoning in `LazyCell` is _unrecoverable_. All future accesses of +/// the cell from other threads will panic, whereas a type in [`std::sync::poison`] like +/// [`std::sync::poison::Mutex`] allows recovery via [`PoisonError::into_inner()`]. 
+/// +/// [`force()`]: LazyCell::force +/// [`std::sync::poison`]: ../../std/sync/poison/index.html +/// [`std::sync::poison::Mutex`]: ../../std/sync/poison/struct.Mutex.html +/// [`PoisonError::into_inner()`]: ../../std/sync/poison/struct.PoisonError.html#method.into_inner +/// /// # Examples /// /// ``` @@ -64,6 +80,10 @@ impl<T, F: FnOnce() -> T> LazyCell<T, F> { /// /// Returns `Ok(value)` if `Lazy` is initialized and `Err(f)` otherwise. /// + /// # Panics + /// + /// Panics if the cell is poisoned. + /// /// # Examples /// /// ``` @@ -93,6 +113,15 @@ impl<T, F: FnOnce() -> T> LazyCell<T, F> { /// /// This is equivalent to the `Deref` impl, but is explicit. /// + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the cell becomes poisoned. This will cause all future + /// accesses of the cell (via [`force()`] or a dereference) to panic. + /// + /// [`new()`]: LazyCell::new + /// [`force()`]: LazyCell::force + /// /// # Examples /// /// ``` @@ -123,6 +152,15 @@ impl<T, F: FnOnce() -> T> LazyCell<T, F> { /// Forces the evaluation of this lazy value and returns a mutable reference to /// the result. /// + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the cell becomes poisoned. This will cause all future + /// accesses of the cell (via [`force()`] or a dereference) to panic. + /// + /// [`new()`]: LazyCell::new + /// [`force()`]: LazyCell::force + /// /// # Examples /// /// ``` @@ -219,7 +257,8 @@ impl<T, F: FnOnce() -> T> LazyCell<T, F> { } impl<T, F> LazyCell<T, F> { - /// Returns a mutable reference to the value if initialized, or `None` if not. + /// Returns a mutable reference to the value if initialized. Otherwise (if uninitialized or + /// poisoned), returns `None`. /// /// # Examples /// @@ -245,7 +284,8 @@ impl<T, F> LazyCell<T, F> { } } - /// Returns a reference to the value if initialized, or `None` if not. + /// Returns a reference to the value if initialized. Otherwise (if uninitialized or poisoned), + /// returns `None`. /// /// # Examples /// @@ -278,6 +318,15 @@ impl<T, F> LazyCell<T, F> { #[stable(feature = "lazy_cell", since = "1.80.0")] impl<T, F: FnOnce() -> T> Deref for LazyCell<T, F> { type Target = T; + + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the cell becomes poisoned. This will cause all future + /// accesses of the cell (via [`force()`] or a dereference) to panic. + /// + /// [`new()`]: LazyCell::new + /// [`force()`]: LazyCell::force #[inline] fn deref(&self) -> &T { LazyCell::force(self) @@ -286,6 +335,14 @@ impl<T, F: FnOnce() -> T> Deref for LazyCell<T, F> { #[stable(feature = "lazy_deref_mut", since = "1.89.0")] impl<T, F: FnOnce() -> T> DerefMut for LazyCell<T, F> { + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the cell becomes poisoned. This will cause all future + /// accesses of the cell (via [`force()`] or a dereference) to panic.
+ /// + /// [`new()`]: LazyCell::new + /// [`force()`]: LazyCell::force #[inline] fn deref_mut(&mut self) -> &mut T { LazyCell::force_mut(self) From 96adb7df9628ae2f495dbafde387dda0152e5fb8 Mon Sep 17 00:00:00 2001 From: Connor Tsui Date: Sun, 3 Aug 2025 22:29:19 +0200 Subject: [PATCH 099/118] add poisoning documentation to `LazyLock` --- library/std/src/sync/lazy_lock.rs | 60 +++++++++++++++++++++++++++++-- 1 file changed, 58 insertions(+), 2 deletions(-) diff --git a/library/std/src/sync/lazy_lock.rs b/library/std/src/sync/lazy_lock.rs index eba849d16dacd..a40e29a772a9c 100644 --- a/library/std/src/sync/lazy_lock.rs +++ b/library/std/src/sync/lazy_lock.rs @@ -25,6 +25,22 @@ union Data { /// /// [`LazyCell`]: crate::cell::LazyCell /// +/// # Poisoning +/// +/// If the initialization closure passed to [`LazyLock::new`] panics, the lock will be poisoned. +/// Once the lock is poisoned, any threads that attempt to access this lock (via a dereference +/// or via an explicit call to [`force()`]) will panic. +/// +/// This concept is similar to that of poisoning in the [`std::sync::poison`] module. A key +/// difference, however, is that poisoning in `LazyLock` is _unrecoverable_. All future accesses of +/// the lock from other threads will panic, whereas a type in [`std::sync::poison`] like +/// [`std::sync::poison::Mutex`] allows recovery via [`PoisonError::into_inner()`]. +/// +/// [`force()`]: LazyLock::force +/// [`std::sync::poison`]: crate::sync::poison +/// [`std::sync::poison::Mutex`]: crate::sync::poison::Mutex +/// [`PoisonError::into_inner()`]: crate::sync::poison::PoisonError::into_inner +/// /// # Examples /// /// Initialize static variables with `LazyLock`. @@ -102,6 +118,10 @@ impl T> LazyLock { /// /// Returns `Ok(value)` if `Lazy` is initialized and `Err(f)` otherwise. /// + /// # Panics + /// + /// Panics if the lock is poisoned. + /// /// # Examples /// /// ``` @@ -136,6 +156,15 @@ impl T> LazyLock { /// Forces the evaluation of this lazy value and returns a mutable reference to /// the result. /// + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the lock becomes poisoned. This will cause all future + /// accesses of the lock (via [`force()`] or a dereference) to panic. + /// + /// [`new()`]: LazyLock::new + /// [`force()`]: LazyLock::force + /// /// # Examples /// /// ``` @@ -193,6 +222,15 @@ impl T> LazyLock { /// This method will block the calling thread if another initialization /// routine is currently running. /// + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the lock becomes poisoned. This will cause all future + /// accesses of the lock (via [`force()`] or a dereference) to panic. + /// + /// [`new()`]: LazyLock::new + /// [`force()`]: LazyLock::force + /// /// # Examples /// /// ``` @@ -227,7 +265,8 @@ impl T> LazyLock { } impl LazyLock { - /// Returns a mutable reference to the value if initialized, or `None` if not. + /// Returns a mutable reference to the value if initialized. Otherwise (if uninitialized or + /// poisoned), returns `None`. /// /// # Examples /// @@ -256,7 +295,8 @@ impl LazyLock { } } - /// Returns a reference to the value if initialized, or `None` if not. + /// Returns a reference to the value if initialized. Otherwise (if uninitialized or poisoned), + /// returns `None`. 
/// /// # Examples /// @@ -307,6 +347,14 @@ impl T> Deref for LazyLock { /// This method will block the calling thread if another initialization /// routine is currently running. /// + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the lock becomes poisoned. This will cause all future + /// accesses of the lock (via [`force()`] or a dereference) to panic. + /// + /// [`new()`]: LazyLock::new + /// [`force()`]: LazyLock::force #[inline] fn deref(&self) -> &T { LazyLock::force(self) @@ -315,6 +363,14 @@ impl T> Deref for LazyLock { #[stable(feature = "lazy_deref_mut", since = "1.89.0")] impl T> DerefMut for LazyLock { + /// # Panics + /// + /// If the initialization closure panics (the one that is passed to the [`new()`] method), the + /// panic is propagated to the caller, and the lock becomes poisoned. This will cause all future + /// accesses of the lock (via [`force()`] or a dereference) to panic. + /// + /// [`new()`]: LazyLock::new + /// [`force()`]: LazyLock::force #[inline] fn deref_mut(&mut self) -> &mut T { LazyLock::force_mut(self) From cc62d552980238335fb7ef4c28196b63a4ec8894 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Mon, 4 Aug 2025 08:16:32 +1000 Subject: [PATCH 100/118] Avoid some code duplication. `print_binder` can call `wrap_binder`. --- compiler/rustc_middle/src/ty/print/pretty.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 033f1e6cd0641..b381d62be47e6 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2437,12 +2437,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { where T: Print<'tcx, Self> + TypeFoldable>, { - let old_region_index = self.region_index; - let (new_value, _) = self.name_all_regions(value, WrapBinderMode::ForAll)?; - new_value.print(self)?; - self.region_index = old_region_index; - self.binder_depth -= 1; - Ok(()) + self.wrap_binder(value, WrapBinderMode::ForAll, |new_value, this| new_value.print(this)) } fn wrap_binder Result<(), PrintError>>( From 6b363ae114e105baf8f91c3b34882b08b52a48b6 Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Sun, 3 Aug 2025 18:58:23 -0400 Subject: [PATCH 101/118] Update to nightly-2025-08-03 --- rust-toolchain | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-toolchain b/rust-toolchain index b220c804c2f2d..058e734be5cf5 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2025-07-21" +channel = "nightly-2025-08-03" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] From 24f17515523a1abd255fee605483aed5d38843ff Mon Sep 17 00:00:00 2001 From: Antoni Boucher Date: Sun, 3 Aug 2025 19:30:57 -0400 Subject: [PATCH 102/118] Add new failing test --- tests/failing-ui-tests.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/failing-ui-tests.txt b/tests/failing-ui-tests.txt index 4dc9507f28f15..41fb4729c07d4 100644 --- a/tests/failing-ui-tests.txt +++ b/tests/failing-ui-tests.txt @@ -84,3 +84,5 @@ tests/ui/simd/intrinsic/generic-arithmetic-pass.rs tests/ui/linking/no-gc-encapsulation-symbols.rs tests/ui/panics/unwind-force-no-unwind-tables.rs tests/ui/attributes/fn-align-dyn.rs +tests/ui/linkage-attr/raw-dylib/elf/glibc-x86_64.rs +tests/ui/explicit-tail-calls/recursion-etc.rs From dac1f3451831c841fad635aca9c801c81652b2e9 Mon Sep 
17 00:00:00 2001 From: Antoni Boucher Date: Sun, 3 Aug 2025 19:57:04 -0400 Subject: [PATCH 103/118] Fix stdarch patch --- ...1-Add-stdarch-Cargo.toml-for-testing.patch | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch b/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch index 9cc377850b9b7..3a8c37a8b8d9a 100644 --- a/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch +++ b/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch @@ -1,29 +1,28 @@ -From b8f3eed3053c9333b5dfbeaeb2a6a65a4b3156df Mon Sep 17 00:00:00 2001 -From: Antoni Boucher -Date: Tue, 29 Aug 2023 13:06:34 -0400 +From 190e26c9274b3c93a9ee3516b395590e6bd9213b Mon Sep 17 00:00:00 2001 +From: None +Date: Sun, 3 Aug 2025 19:54:56 -0400 Subject: [PATCH] Patch 0001-Add-stdarch-Cargo.toml-for-testing.patch --- - library/stdarch/Cargo.toml | 23 +++++++++++++++++++++++ - 1 file changed, 23 insertions(+) + library/stdarch/Cargo.toml | 20 ++++++++++++++++++++ + 1 file changed, 20 insertions(+) create mode 100644 library/stdarch/Cargo.toml diff --git a/library/stdarch/Cargo.toml b/library/stdarch/Cargo.toml new file mode 100644 -index 0000000..4c63700 +index 0000000..bd6725c --- /dev/null +++ b/library/stdarch/Cargo.toml -@@ -0,0 +1,21 @@ +@@ -0,0 +1,20 @@ +[workspace] +resolver = "1" +members = [ -+ "crates/core_arch", -+ "crates/std_detect", -+ "crates/stdarch-gen-arm", ++ "crates/*", + #"examples/" +] +exclude = [ -+ "crates/wasm-assert-instr-tests" ++ "crates/wasm-assert-instr-tests", ++ "rust_programs", +] + +[profile.release] @@ -36,5 +35,5 @@ index 0000000..4c63700 +opt-level = 3 +incremental = true -- -2.42.0 +2.50.1 From 33706fc17f858caf42d36a9f67ef794269b6a83f Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sat, 2 Aug 2025 22:17:14 +1000 Subject: [PATCH 104/118] coverage: Include an `Instance` in `CovfunRecord` for debug messages --- .../rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs index b704cf2b1cd46..44b6eb0e68903 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs @@ -27,6 +27,9 @@ use crate::llvm; /// the final record that will be embedded in the `__llvm_covfun` section. #[derive(Debug)] pub(crate) struct CovfunRecord<'tcx> { + /// Not used directly, but helpful in debug messages. + _instance: Instance<'tcx>, + mangled_function_name: &'tcx str, source_hash: u64, is_used: bool, @@ -55,6 +58,7 @@ pub(crate) fn prepare_covfun_record<'tcx>( let expressions = prepare_expressions(ids_info); let mut covfun = CovfunRecord { + _instance: instance, mangled_function_name: tcx.symbol_name(instance).name, source_hash: if is_used { fn_cov_info.function_source_hash } else { 0 }, is_used, @@ -106,7 +110,7 @@ fn fill_region_tables<'tcx>( // first mapping's span to determine the file. 
let source_map = tcx.sess.source_map(); let Some(first_span) = (try { fn_cov_info.mappings.first()?.span }) else { - debug_assert!(false, "function has no mappings: {:?}", covfun.mangled_function_name); + debug_assert!(false, "function has no mappings: {covfun:?}"); return; }; let source_file = source_map.lookup_source_file(first_span.lo()); @@ -184,6 +188,7 @@ pub(crate) fn generate_covfun_record<'tcx>( covfun: &CovfunRecord<'tcx>, ) { let &CovfunRecord { + _instance, mangled_function_name, source_hash, is_used, From 16843ce427965a39a5f226ce3057ba3c2f5ed23c Mon Sep 17 00:00:00 2001 From: Zalathar Date: Fri, 9 May 2025 21:27:27 +1000 Subject: [PATCH 105/118] coverage: Hoist `counter_for_bcb` out of its loop Having this helper function in the loop was confusing, because it doesn't rely on anything that changes between loop iterations. --- .../src/coverageinfo/mapgen/covfun.rs | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs index 44b6eb0e68903..69dfe1e98db15 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs @@ -106,6 +106,16 @@ fn fill_region_tables<'tcx>( ids_info: &'tcx CoverageIdsInfo, covfun: &mut CovfunRecord<'tcx>, ) { + // If this function is unused, replace all counters with zero. + let counter_for_bcb = |bcb: BasicCoverageBlock| -> ffi::Counter { + let term = if covfun.is_used { + ids_info.term_for_bcb[bcb].expect("every BCB in a mapping was given a term") + } else { + CovTerm::Zero + }; + ffi::Counter::from_term(term) + }; + // Currently a function's mappings must all be in the same file, so use the // first mapping's span to determine the file. let source_map = tcx.sess.source_map(); @@ -137,16 +147,6 @@ fn fill_region_tables<'tcx>( // For each counter/region pair in this function+file, convert it to a // form suitable for FFI. for &Mapping { ref kind, span } in &fn_cov_info.mappings { - // If this function is unused, replace all counters with zero. - let counter_for_bcb = |bcb: BasicCoverageBlock| -> ffi::Counter { - let term = if covfun.is_used { - ids_info.term_for_bcb[bcb].expect("every BCB in a mapping was given a term") - } else { - CovTerm::Zero - }; - ffi::Counter::from_term(term) - }; - let Some(coords) = make_coords(span) else { continue }; let cov_span = coords.make_coverage_span(local_file_id); From 51e62a09a376e30838db0d8ade4e3e89508357e0 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sat, 2 Aug 2025 18:20:56 +1000 Subject: [PATCH 106/118] coverage: Remove `-Zcoverage-options=no-mir-spans` This flag turned out to be less useful than anticipated, and interferes with work towards expansion support. 
--- compiler/rustc_interface/src/tests.rs | 4 +- .../src/coverage/mappings.rs | 6 +- compiler/rustc_session/src/config.rs | 8 -- compiler/rustc_session/src/options.rs | 4 +- compiler/rustc_session/src/session.rs | 5 -- tests/coverage/branch/no-mir-spans.cov-map | 63 --------------- tests/coverage/branch/no-mir-spans.coverage | 77 ------------------- tests/coverage/branch/no-mir-spans.rs | 62 --------------- .../coverage-options.bad.stderr | 2 +- 9 files changed, 5 insertions(+), 226 deletions(-) delete mode 100644 tests/coverage/branch/no-mir-spans.cov-map delete mode 100644 tests/coverage/branch/no-mir-spans.coverage delete mode 100644 tests/coverage/branch/no-mir-spans.rs diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 8771bb4405049..86faab62d03a7 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -778,8 +778,8 @@ fn test_unstable_options_tracking_hash() { coverage_options, CoverageOptions { level: CoverageLevel::Mcdc, - no_mir_spans: true, - discard_all_spans_in_codegen: true + // (don't collapse test-only options onto the same line) + discard_all_spans_in_codegen: true, } ); tracked!(crate_attr, vec!["abc".to_string()]); diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs index b4b4d0416fb99..c79b76d90f29c 100644 --- a/compiler/rustc_mir_transform/src/coverage/mappings.rs +++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs @@ -82,15 +82,11 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>( let mut mcdc_degraded_branches = vec![]; let mut mcdc_mappings = vec![]; - if hir_info.is_async_fn || tcx.sess.coverage_no_mir_spans() { + if hir_info.is_async_fn { // An async function desugars into a function that returns a future, // with the user code wrapped in a closure. Any spans in the desugared // outer function will be unhelpful, so just keep the signature span // and ignore all of the spans in the MIR body. - // - // When debugging flag `-Zcoverage-options=no-mir-spans` is set, we need - // to give the same treatment to _all_ functions, because `llvm-cov` - // seems to ignore functions that don't have any ordinary code spans. if let Some(span) = hir_info.fn_sig_span { code_mappings.push(CodeMapping { span, bcb: START_BCB }); } diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 8f624e0fb2fcb..e82f4527c6cdc 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -182,14 +182,6 @@ pub enum InstrumentCoverage { pub struct CoverageOptions { pub level: CoverageLevel, - /// `-Zcoverage-options=no-mir-spans`: Don't extract block coverage spans - /// from MIR statements/terminators, making it easier to inspect/debug - /// branch and MC/DC coverage mappings. - /// - /// For internal debugging only. If other code changes would make it hard - /// to keep supporting this flag, remove it. - pub no_mir_spans: bool, - /// `-Zcoverage-options=discard-all-spans-in-codegen`: During codegen, /// discard all coverage spans as though they were invalid. 
Needed by /// regression tests for #133606, because we don't have an easy way to diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 44b35e8921ec3..880b08d444414 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -755,8 +755,7 @@ mod desc { pub(crate) const parse_linker_flavor: &str = ::rustc_target::spec::LinkerFlavorCli::one_of(); pub(crate) const parse_dump_mono_stats: &str = "`markdown` (default) or `json`"; pub(crate) const parse_instrument_coverage: &str = parse_bool; - pub(crate) const parse_coverage_options: &str = - "`block` | `branch` | `condition` | `mcdc` | `no-mir-spans`"; + pub(crate) const parse_coverage_options: &str = "`block` | `branch` | `condition` | `mcdc`"; pub(crate) const parse_instrument_xray: &str = "either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit`"; pub(crate) const parse_unpretty: &str = "`string` or `string=string`"; pub(crate) const parse_treat_err_as_bug: &str = "either no value or a non-negative number"; @@ -1460,7 +1459,6 @@ pub mod parse { "branch" => slot.level = CoverageLevel::Branch, "condition" => slot.level = CoverageLevel::Condition, "mcdc" => slot.level = CoverageLevel::Mcdc, - "no-mir-spans" => slot.no_mir_spans = true, "discard-all-spans-in-codegen" => slot.discard_all_spans_in_codegen = true, _ => return false, } diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index e7097ec832753..c311a726aa7d0 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -359,11 +359,6 @@ impl Session { && self.opts.unstable_opts.coverage_options.level >= CoverageLevel::Mcdc } - /// True if `-Zcoverage-options=no-mir-spans` was passed. - pub fn coverage_no_mir_spans(&self) -> bool { - self.opts.unstable_opts.coverage_options.no_mir_spans - } - /// True if `-Zcoverage-options=discard-all-spans-in-codegen` was passed. 
pub fn coverage_discard_all_spans_in_codegen(&self) -> bool { self.opts.unstable_opts.coverage_options.discard_all_spans_in_codegen diff --git a/tests/coverage/branch/no-mir-spans.cov-map b/tests/coverage/branch/no-mir-spans.cov-map deleted file mode 100644 index 4f893cba1f815..0000000000000 --- a/tests/coverage/branch/no-mir-spans.cov-map +++ /dev/null @@ -1,63 +0,0 @@ -Function name: no_mir_spans::while_cond -Raw bytes (18): 0x[01, 01, 01, 05, 01, 02, 01, 10, 01, 00, 10, 20, 02, 01, 04, 0b, 00, 10] -Number of files: 1 -- file 0 => $DIR/no-mir-spans.rs -Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(0) -Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 16, 1) to (start + 0, 16) -- Branch { true: Expression(0, Sub), false: Counter(0) } at (prev + 4, 11) to (start + 0, 16) - true = (c1 - c0) - false = c0 -Highest counter ID seen: c0 - -Function name: no_mir_spans::while_cond_not -Raw bytes (18): 0x[01, 01, 01, 05, 01, 02, 01, 19, 01, 00, 14, 20, 02, 01, 04, 0b, 00, 14] -Number of files: 1 -- file 0 => $DIR/no-mir-spans.rs -Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(0) -Number of file 0 mappings: 2 -- Code(Counter(0)) at (prev + 25, 1) to (start + 0, 20) -- Branch { true: Expression(0, Sub), false: Counter(0) } at (prev + 4, 11) to (start + 0, 20) - true = (c1 - c0) - false = c0 -Highest counter ID seen: c0 - -Function name: no_mir_spans::while_op_and -Raw bytes (31): 0x[01, 01, 04, 09, 05, 09, 01, 0f, 09, 01, 05, 03, 01, 22, 01, 00, 12, 20, 05, 02, 05, 0b, 00, 10, 20, 06, 0a, 00, 14, 00, 19] -Number of files: 1 -- file 0 => $DIR/no-mir-spans.rs -Number of expressions: 4 -- expression 0 operands: lhs = Counter(2), rhs = Counter(1) -- expression 1 operands: lhs = Counter(2), rhs = Counter(0) -- expression 2 operands: lhs = Expression(3, Add), rhs = Counter(2) -- expression 3 operands: lhs = Counter(0), rhs = Counter(1) -Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 34, 1) to (start + 0, 18) -- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 5, 11) to (start + 0, 16) - true = c1 - false = (c2 - c1) -- Branch { true: Expression(1, Sub), false: Expression(2, Sub) } at (prev + 0, 20) to (start + 0, 25) - true = (c2 - c0) - false = ((c0 + c1) - c2) -Highest counter ID seen: c1 - -Function name: no_mir_spans::while_op_or -Raw bytes (29): 0x[01, 01, 03, 09, 05, 09, 0b, 01, 05, 03, 01, 2d, 01, 00, 11, 20, 05, 02, 05, 0b, 00, 10, 20, 06, 01, 00, 14, 00, 19] -Number of files: 1 -- file 0 => $DIR/no-mir-spans.rs -Number of expressions: 3 -- expression 0 operands: lhs = Counter(2), rhs = Counter(1) -- expression 1 operands: lhs = Counter(2), rhs = Expression(2, Add) -- expression 2 operands: lhs = Counter(0), rhs = Counter(1) -Number of file 0 mappings: 3 -- Code(Counter(0)) at (prev + 45, 1) to (start + 0, 17) -- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 5, 11) to (start + 0, 16) - true = c1 - false = (c2 - c1) -- Branch { true: Expression(1, Sub), false: Counter(0) } at (prev + 0, 20) to (start + 0, 25) - true = (c2 - (c0 + c1)) - false = c0 -Highest counter ID seen: c1 - diff --git a/tests/coverage/branch/no-mir-spans.coverage b/tests/coverage/branch/no-mir-spans.coverage deleted file mode 100644 index 2cae98ed3ff4f..0000000000000 --- a/tests/coverage/branch/no-mir-spans.coverage +++ /dev/null @@ -1,77 +0,0 @@ - LL| |#![feature(coverage_attribute)] - LL| |//@ edition: 2021 - LL| |//@ compile-flags: -Zcoverage-options=branch,no-mir-spans - LL| |//@ 
llvm-cov-flags: --show-branches=count - LL| | - LL| |// Tests the behaviour of the `-Zcoverage-options=no-mir-spans` debugging flag. - LL| |// The actual code below is just some non-trivial code copied from another test - LL| |// (`while.rs`), and has no particular significance. - LL| | - LL| |macro_rules! no_merge { - LL| | () => { - LL| | for _ in 0..1 {} - LL| | }; - LL| |} - LL| | - LL| 1|fn while_cond() { - LL| | no_merge!(); - LL| | - LL| | let mut a = 8; - LL| | while a > 0 { - ------------------ - | Branch (LL:11): [True: 8, False: 1] - ------------------ - LL| | a -= 1; - LL| | } - LL| |} - LL| | - LL| 1|fn while_cond_not() { - LL| | no_merge!(); - LL| | - LL| | let mut a = 8; - LL| | while !(a == 0) { - ------------------ - | Branch (LL:11): [True: 8, False: 1] - ------------------ - LL| | a -= 1; - LL| | } - LL| |} - LL| | - LL| 1|fn while_op_and() { - LL| | no_merge!(); - LL| | - LL| | let mut a = 8; - LL| | let mut b = 4; - LL| | while a > 0 && b > 0 { - ------------------ - | Branch (LL:11): [True: 5, False: 0] - | Branch (LL:20): [True: 4, False: 1] - ------------------ - LL| | a -= 1; - LL| | b -= 1; - LL| | } - LL| |} - LL| | - LL| 1|fn while_op_or() { - LL| | no_merge!(); - LL| | - LL| | let mut a = 4; - LL| | let mut b = 8; - LL| | while a > 0 || b > 0 { - ------------------ - | Branch (LL:11): [True: 4, False: 5] - | Branch (LL:20): [True: 4, False: 1] - ------------------ - LL| | a -= 1; - LL| | b -= 1; - LL| | } - LL| |} - LL| | - LL| |#[coverage(off)] - LL| |fn main() { - LL| | while_cond(); - LL| | while_cond_not(); - LL| | while_op_and(); - LL| | while_op_or(); - LL| |} - diff --git a/tests/coverage/branch/no-mir-spans.rs b/tests/coverage/branch/no-mir-spans.rs deleted file mode 100644 index acb268f2d4554..0000000000000 --- a/tests/coverage/branch/no-mir-spans.rs +++ /dev/null @@ -1,62 +0,0 @@ -#![feature(coverage_attribute)] -//@ edition: 2021 -//@ compile-flags: -Zcoverage-options=branch,no-mir-spans -//@ llvm-cov-flags: --show-branches=count - -// Tests the behaviour of the `-Zcoverage-options=no-mir-spans` debugging flag. -// The actual code below is just some non-trivial code copied from another test -// (`while.rs`), and has no particular significance. - -macro_rules! 
no_merge { - () => { - for _ in 0..1 {} - }; -} - -fn while_cond() { - no_merge!(); - - let mut a = 8; - while a > 0 { - a -= 1; - } -} - -fn while_cond_not() { - no_merge!(); - - let mut a = 8; - while !(a == 0) { - a -= 1; - } -} - -fn while_op_and() { - no_merge!(); - - let mut a = 8; - let mut b = 4; - while a > 0 && b > 0 { - a -= 1; - b -= 1; - } -} - -fn while_op_or() { - no_merge!(); - - let mut a = 4; - let mut b = 8; - while a > 0 || b > 0 { - a -= 1; - b -= 1; - } -} - -#[coverage(off)] -fn main() { - while_cond(); - while_cond_not(); - while_op_and(); - while_op_or(); -} diff --git a/tests/ui/instrument-coverage/coverage-options.bad.stderr b/tests/ui/instrument-coverage/coverage-options.bad.stderr index 1a6b30dc8324b..4a272cf97fb00 100644 --- a/tests/ui/instrument-coverage/coverage-options.bad.stderr +++ b/tests/ui/instrument-coverage/coverage-options.bad.stderr @@ -1,2 +1,2 @@ -error: incorrect value `bad` for unstable option `coverage-options` - `block` | `branch` | `condition` | `mcdc` | `no-mir-spans` was expected +error: incorrect value `bad` for unstable option `coverage-options` - `block` | `branch` | `condition` | `mcdc` was expected From fb39d3ed880d7f9f2f1ef67e0ddd0d0b8af9e5ae Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sat, 2 Aug 2025 18:28:03 +1000 Subject: [PATCH 107/118] coverage: Push async special case down into `extract_refined_covspans` --- .../rustc_mir_transform/src/coverage/mappings.rs | 16 +++------------- .../rustc_mir_transform/src/coverage/spans.rs | 14 +++++++++++++- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs index c79b76d90f29c..b0e24cf2bdb86 100644 --- a/compiler/rustc_mir_transform/src/coverage/mappings.rs +++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs @@ -10,7 +10,7 @@ use rustc_middle::ty::TyCtxt; use rustc_span::Span; use crate::coverage::ExtractedHirInfo; -use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, START_BCB}; +use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; use crate::coverage::spans::extract_refined_covspans; use crate::coverage::unexpand::unexpand_into_body_span; use crate::errors::MCDCExceedsTestVectorLimit; @@ -82,18 +82,8 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>( let mut mcdc_degraded_branches = vec![]; let mut mcdc_mappings = vec![]; - if hir_info.is_async_fn { - // An async function desugars into a function that returns a future, - // with the user code wrapped in a closure. Any spans in the desugared - // outer function will be unhelpful, so just keep the signature span - // and ignore all of the spans in the MIR body. - if let Some(span) = hir_info.fn_sig_span { - code_mappings.push(CodeMapping { span, bcb: START_BCB }); - } - } else { - // Extract coverage spans from MIR statements/terminators as normal. - extract_refined_covspans(tcx, mir_body, hir_info, graph, &mut code_mappings); - } + // Extract ordinary code mappings from MIR statement/terminator spans. 
+ extract_refined_covspans(tcx, mir_body, hir_info, graph, &mut code_mappings); branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, graph)); diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index ddeae093df5b7..0ee42abb19564 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -1,5 +1,6 @@ use rustc_data_structures::fx::FxHashSet; use rustc_middle::mir; +use rustc_middle::mir::coverage::START_BCB; use rustc_middle::ty::TyCtxt; use rustc_span::source_map::SourceMap; use rustc_span::{BytePos, DesugaringKind, ExpnKind, MacroKind, Span}; @@ -16,8 +17,19 @@ pub(super) fn extract_refined_covspans<'tcx>( mir_body: &mir::Body<'tcx>, hir_info: &ExtractedHirInfo, graph: &CoverageGraph, - code_mappings: &mut impl Extend, + code_mappings: &mut Vec, ) { + if hir_info.is_async_fn { + // An async function desugars into a function that returns a future, + // with the user code wrapped in a closure. Any spans in the desugared + // outer function will be unhelpful, so just keep the signature span + // and ignore all of the spans in the MIR body. + if let Some(span) = hir_info.fn_sig_span { + code_mappings.push(mappings::CodeMapping { span, bcb: START_BCB }); + } + return; + } + let &ExtractedHirInfo { body_span, .. } = hir_info; let raw_spans = from_mir::extract_raw_spans_from_mir(mir_body, graph); From f496e83fe9a12cbaec6a61da83ba32159a87afde Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sat, 2 Aug 2025 18:41:04 +1000 Subject: [PATCH 108/118] coverage: Simplify access to debug/testing `-Zcoverage-options` flags --- .../src/coverageinfo/mapgen/covfun.rs | 2 +- compiler/rustc_session/src/config.rs | 1 + compiler/rustc_session/src/session.rs | 12 +++++++----- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs index 69dfe1e98db15..fd1e7f7f160ab 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs @@ -131,7 +131,7 @@ fn fill_region_tables<'tcx>( // codegen needs to handle that gracefully to avoid #133606. // It's hard for tests to trigger this organically, so instead we set // `-Zcoverage-options=discard-all-spans-in-codegen` to force it to occur. - let discard_all = tcx.sess.coverage_discard_all_spans_in_codegen(); + let discard_all = tcx.sess.coverage_options().discard_all_spans_in_codegen; let make_coords = |span: Span| { if discard_all { None } else { spans::make_coords(source_map, &source_file, span) } }; diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index e82f4527c6cdc..cfeadf3c7595a 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -182,6 +182,7 @@ pub enum InstrumentCoverage { pub struct CoverageOptions { pub level: CoverageLevel, + /// **(internal test-only flag)** /// `-Zcoverage-options=discard-all-spans-in-codegen`: During codegen, /// discard all coverage spans as though they were invalid. 
Needed by
+    /// regression tests for #133606, because we don't have an easy way to
diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs
index c311a726aa7d0..b94636fea94e1 100644
--- a/compiler/rustc_session/src/session.rs
+++ b/compiler/rustc_session/src/session.rs
@@ -39,8 +39,8 @@ use rustc_target::spec::{
 use crate::code_stats::CodeStats;
 pub use crate::code_stats::{DataTypeKind, FieldInfo, FieldKind, SizeKind, VariantInfo};
 use crate::config::{
-    self, CoverageLevel, CrateType, DebugInfo, ErrorOutputType, FunctionReturn, Input,
-    InstrumentCoverage, OptLevel, OutFileName, OutputType, RemapPathScopeComponents,
+    self, CoverageLevel, CoverageOptions, CrateType, DebugInfo, ErrorOutputType, FunctionReturn,
+    Input, InstrumentCoverage, OptLevel, OutFileName, OutputType, RemapPathScopeComponents,
     SwitchWithOptPath,
 };
 use crate::filesearch::FileSearch;
@@ -359,9 +359,11 @@ impl Session {
             && self.opts.unstable_opts.coverage_options.level >= CoverageLevel::Mcdc
     }
 
-    /// True if `-Zcoverage-options=discard-all-spans-in-codegen` was passed.
-    pub fn coverage_discard_all_spans_in_codegen(&self) -> bool {
-        self.opts.unstable_opts.coverage_options.discard_all_spans_in_codegen
+    /// Provides direct access to the `CoverageOptions` struct, so that
+    /// individual flags for debugging/testing coverage instrumentation don't
+    /// need separate accessors.
+    pub fn coverage_options(&self) -> &CoverageOptions {
+        &self.opts.unstable_opts.coverage_options
     }
 
     pub fn is_sanitizer_cfi_enabled(&self) -> bool {

From b37c214ec2bbb7609eb4b963bb2b0da160fef57f Mon Sep 17 00:00:00 2001
From: Zalathar
Date: Sun, 3 Aug 2025 14:35:17 +1000
Subject: [PATCH 109/118] coverage: Represent `CovmapVersion` as an enum

Using an enum here was historically not worth the extra hassle, but now
we can lean on `#[derive(TryFromU32)]` to hide most of the boilerplate.
---
 .../src/coverageinfo/mapgen.rs                | 47 +++++++++++++------
 1 file changed, 32 insertions(+), 15 deletions(-)

diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
index 8c9dfcfd18c2c..d1cb95507d91c 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
@@ -1,3 +1,4 @@
+use std::assert_matches::assert_matches;
 use std::sync::Arc;
 
 use itertools::Itertools;
@@ -5,6 +6,7 @@ use rustc_abi::Align;
 use rustc_codegen_ssa::traits::{BaseTypeCodegenMethods, ConstCodegenMethods};
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_index::IndexVec;
+use rustc_macros::TryFromU32;
 use rustc_middle::ty::TyCtxt;
 use rustc_session::RemapFileNameExt;
 use rustc_session::config::RemapPathScopeComponents;
@@ -20,6 +22,23 @@ mod covfun;
 mod spans;
 mod unused;
 
+/// Version number that will be included in the `__llvm_covmap` section header.
+/// Corresponds to LLVM's `llvm::coverage::CovMapVersion` (in `CoverageMapping.h`),
+/// or at least the subset that we know and care about.
+///
+/// Note that version `n` is encoded as `(n-1)`.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, TryFromU32)]
+enum CovmapVersion {
+    /// Used by LLVM 18 onwards.
+    Version7 = 6,
+}
+
+impl CovmapVersion {
+    fn to_u32(self) -> u32 {
+        self as u32
+    }
+}
+
 /// Generates and exports the coverage map, which is embedded in special
 /// linker sections in the final binary.
/// @@ -29,19 +48,13 @@ pub(crate) fn finalize(cx: &mut CodegenCx<'_, '_>) { let tcx = cx.tcx; // Ensure that LLVM is using a version of the coverage mapping format that - // agrees with our Rust-side code. Expected versions (encoded as n-1) are: - // - `CovMapVersion::Version7` (6) used by LLVM 18-19 - let covmap_version = { - let llvm_covmap_version = llvm_cov::mapping_version(); - let expected_versions = 6..=6; - assert!( - expected_versions.contains(&llvm_covmap_version), - "Coverage mapping version exposed by `llvm-wrapper` is out of sync; \ - expected {expected_versions:?} but was {llvm_covmap_version}" - ); - // This is the version number that we will embed in the covmap section: - llvm_covmap_version - }; + // agrees with our Rust-side code. Expected versions are: + // - `Version7` (6) used by LLVM 18 onwards. + let covmap_version = + CovmapVersion::try_from(llvm_cov::mapping_version()).unwrap_or_else(|raw_version: u32| { + panic!("unknown coverage mapping version reported by `llvm-wrapper`: {raw_version}") + }); + assert_matches!(covmap_version, CovmapVersion::Version7); debug!("Generating coverage map for CodegenUnit: `{}`", cx.codegen_unit.name()); @@ -201,7 +214,11 @@ impl VirtualFileMapping { /// Generates the contents of the covmap record for this CGU, which mostly /// consists of a header and a list of filenames. The record is then stored /// as a global variable in the `__llvm_covmap` section. -fn generate_covmap_record<'ll>(cx: &mut CodegenCx<'ll, '_>, version: u32, filenames_buffer: &[u8]) { +fn generate_covmap_record<'ll>( + cx: &mut CodegenCx<'ll, '_>, + version: CovmapVersion, + filenames_buffer: &[u8], +) { // A covmap record consists of four target-endian u32 values, followed by // the encoded filenames table. Two of the header fields are unused in // modern versions of the LLVM coverage mapping format, and are always 0. @@ -212,7 +229,7 @@ fn generate_covmap_record<'ll>(cx: &mut CodegenCx<'ll, '_>, version: u32, filena cx.const_u32(0), // (unused) cx.const_u32(filenames_buffer.len() as u32), cx.const_u32(0), // (unused) - cx.const_u32(version), + cx.const_u32(version.to_u32()), ], /* packed */ false, ); From b8d1af5e5633ff1ac1c9c9a3921af2d8e8620410 Mon Sep 17 00:00:00 2001 From: The rustc-josh-sync Cronjob Bot Date: Mon, 4 Aug 2025 04:24:59 +0000 Subject: [PATCH 110/118] Prepare for merging from rust-lang/rust This updates the rust-version file to 383b9c447b61641e1f1a3850253944a897a60827. 
--- src/doc/rustc-dev-guide/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/doc/rustc-dev-guide/rust-version b/src/doc/rustc-dev-guide/rust-version index 1ced6098acf4b..e9f1626f1fdd4 100644 --- a/src/doc/rustc-dev-guide/rust-version +++ b/src/doc/rustc-dev-guide/rust-version @@ -1 +1 @@ -32e7a4b92b109c24e9822c862a7c74436b50e564 +383b9c447b61641e1f1a3850253944a897a60827 From 450040f2d39ae0f5ec7889d48d1a7aa4d2c08c5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Fri, 1 Aug 2025 14:58:06 +0200 Subject: [PATCH 111/118] Implement debugging output of the bootstrap Step graph into a DOT file --- src/bootstrap/src/bin/main.rs | 3 + src/bootstrap/src/core/builder/mod.rs | 25 ++- src/bootstrap/src/lib.rs | 12 +- src/bootstrap/src/utils/mod.rs | 3 + src/bootstrap/src/utils/step_graph.rs | 176 ++++++++++++++++++ .../bootstrapping/debugging-bootstrap.md | 6 + 6 files changed, 223 insertions(+), 2 deletions(-) create mode 100644 src/bootstrap/src/utils/step_graph.rs diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs index 181d71f63c2f5..cf24fedaebb15 100644 --- a/src/bootstrap/src/bin/main.rs +++ b/src/bootstrap/src/bin/main.rs @@ -159,6 +159,9 @@ fn main() { if is_bootstrap_profiling_enabled() { build.report_summary(start_time); } + + #[cfg(feature = "tracing")] + build.report_step_graph(); } fn check_version(config: &Config) -> Option { diff --git a/src/bootstrap/src/core/builder/mod.rs b/src/bootstrap/src/core/builder/mod.rs index 96289a63785e2..20f3fee1c6cb6 100644 --- a/src/bootstrap/src/core/builder/mod.rs +++ b/src/bootstrap/src/core/builder/mod.rs @@ -77,7 +77,7 @@ impl Deref for Builder<'_> { /// type's [`Debug`] implementation. /// /// (Trying to debug-print `dyn Any` results in the unhelpful `"Any { .. }"`.) -trait AnyDebug: Any + Debug {} +pub trait AnyDebug: Any + Debug {} impl AnyDebug for T {} impl dyn AnyDebug { /// Equivalent to `::downcast_ref`. @@ -197,6 +197,14 @@ impl StepMetadata { // For everything else, a stage N things gets built by a stage N-1 compiler. .map(|compiler| if self.name == "std" { compiler.stage } else { compiler.stage + 1 })) } + + pub fn get_name(&self) -> &str { + &self.name + } + + pub fn get_target(&self) -> TargetSelection { + self.target + } } pub struct RunConfig<'a> { @@ -1657,9 +1665,24 @@ You have to build a stage1 compiler for `{}` first, and then use it to build a s if let Some(out) = self.cache.get(&step) { self.verbose_than(1, || println!("{}c {:?}", " ".repeat(stack.len()), step)); + #[cfg(feature = "tracing")] + { + if let Some(parent) = stack.last() { + let mut graph = self.build.step_graph.borrow_mut(); + graph.register_cached_step(&step, parent, self.config.dry_run()); + } + } return out; } self.verbose_than(1, || println!("{}> {:?}", " ".repeat(stack.len()), step)); + + #[cfg(feature = "tracing")] + { + let parent = stack.last(); + let mut graph = self.build.step_graph.borrow_mut(); + graph.register_step_execution(&step, parent, self.config.dry_run()); + } + stack.push(Box::new(step.clone())); } diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 011b52df97bbd..e49513a21160b 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -188,7 +188,6 @@ pub enum GitRepo { /// although most functions are implemented as free functions rather than /// methods specifically on this structure itself (to make it easier to /// organize). -#[derive(Clone)] pub struct Build { /// User-specified configuration from `bootstrap.toml`. 
config: Config, @@ -244,6 +243,9 @@ pub struct Build { #[cfg(feature = "build-metrics")] metrics: crate::utils::metrics::BuildMetrics, + + #[cfg(feature = "tracing")] + step_graph: std::cell::RefCell, } #[derive(Debug, Clone)] @@ -547,6 +549,9 @@ impl Build { #[cfg(feature = "build-metrics")] metrics: crate::utils::metrics::BuildMetrics::init(), + + #[cfg(feature = "tracing")] + step_graph: std::cell::RefCell::new(crate::utils::step_graph::StepGraph::default()), }; // If local-rust is the same major.minor as the current version, then force a @@ -2024,6 +2029,11 @@ to download LLVM rather than building it. pub fn report_summary(&self, start_time: Instant) { self.config.exec_ctx.profiler().report_summary(start_time); } + + #[cfg(feature = "tracing")] + pub fn report_step_graph(self) { + self.step_graph.into_inner().store_to_dot_files(); + } } impl AsRef for Build { diff --git a/src/bootstrap/src/utils/mod.rs b/src/bootstrap/src/utils/mod.rs index 169fcec303e90..97d8d274e8fb6 100644 --- a/src/bootstrap/src/utils/mod.rs +++ b/src/bootstrap/src/utils/mod.rs @@ -19,5 +19,8 @@ pub(crate) mod tracing; #[cfg(feature = "build-metrics")] pub(crate) mod metrics; +#[cfg(feature = "tracing")] +pub(crate) mod step_graph; + #[cfg(test)] pub(crate) mod tests; diff --git a/src/bootstrap/src/utils/step_graph.rs b/src/bootstrap/src/utils/step_graph.rs new file mode 100644 index 0000000000000..b1db9e61fda7d --- /dev/null +++ b/src/bootstrap/src/utils/step_graph.rs @@ -0,0 +1,176 @@ +use std::collections::{HashMap, HashSet}; +use std::fmt::Debug; +use std::io::BufWriter; + +use crate::core::builder::{AnyDebug, Step}; + +/// Records the executed steps and their dependencies in a directed graph, +/// which can then be rendered into a DOT file for visualization. +/// +/// The graph visualizes the first execution of a step with a solid edge, +/// and cached executions of steps with a dashed edge. +/// If you only want to see first executions, you can modify the code in `DotGraph` to +/// always set `cached: false`. +#[derive(Default)] +pub struct StepGraph { + /// We essentially store one graph per dry run mode. + graphs: HashMap, +} + +impl StepGraph { + pub fn register_step_execution( + &mut self, + step: &S, + parent: Option<&Box>, + dry_run: bool, + ) { + let key = get_graph_key(dry_run); + let graph = self.graphs.entry(key.to_string()).or_insert_with(|| DotGraph::default()); + + // The debug output of the step sort of serves as the unique identifier of it. + // We use it to access the node ID of parents to generate edges. + // We could probably also use addresses on the heap from the `Box`, but this seems less + // magical. 
+ let node_key = render_step(step); + + let label = if let Some(metadata) = step.metadata() { + format!( + "{}{} [{}]", + metadata.get_name(), + metadata.get_stage().map(|s| format!(" stage {s}")).unwrap_or_default(), + metadata.get_target() + ) + } else { + let type_name = std::any::type_name::(); + type_name + .strip_prefix("bootstrap::core::") + .unwrap_or(type_name) + .strip_prefix("build_steps::") + .unwrap_or(type_name) + .to_string() + }; + + let node = Node { label, tooltip: node_key.clone() }; + let node_handle = graph.add_node(node_key, node); + + if let Some(parent) = parent { + let parent_key = render_step(parent); + if let Some(src_node_handle) = graph.get_handle_by_key(&parent_key) { + graph.add_edge(src_node_handle, node_handle); + } + } + } + + pub fn register_cached_step( + &mut self, + step: &S, + parent: &Box, + dry_run: bool, + ) { + let key = get_graph_key(dry_run); + let graph = self.graphs.get_mut(key).unwrap(); + + let node_key = render_step(step); + let parent_key = render_step(parent); + + if let Some(src_node_handle) = graph.get_handle_by_key(&parent_key) { + if let Some(dst_node_handle) = graph.get_handle_by_key(&node_key) { + graph.add_cached_edge(src_node_handle, dst_node_handle); + } + } + } + + pub fn store_to_dot_files(self) { + for (key, graph) in self.graphs.into_iter() { + let filename = format!("bootstrap-steps{key}.dot"); + graph.render(&filename).unwrap(); + } + } +} + +fn get_graph_key(dry_run: bool) -> &'static str { + if dry_run { ".dryrun" } else { "" } +} + +struct Node { + label: String, + tooltip: String, +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +struct NodeHandle(usize); + +#[derive(PartialEq, Eq, Hash, PartialOrd, Ord)] +struct Edge { + src: NodeHandle, + dst: NodeHandle, + cached: bool, +} + +// We could use a library for this, but they either: +// - require lifetimes, which gets annoying (dot_writer) +// - don't support tooltips (dot_graph) +// - have a lot of dependencies (graphviz_rust) +// - only have SVG export (layout-rs) +// - use a builder pattern that is very annoying to use here (tabbycat) +#[derive(Default)] +struct DotGraph { + nodes: Vec, + /// The `NodeHandle` represents an index within `self.nodes` + edges: HashSet, + key_to_index: HashMap, +} + +impl DotGraph { + fn add_node(&mut self, key: String, node: Node) -> NodeHandle { + let handle = NodeHandle(self.nodes.len()); + self.nodes.push(node); + self.key_to_index.insert(key, handle); + handle + } + + fn add_edge(&mut self, src: NodeHandle, dst: NodeHandle) { + self.edges.insert(Edge { src, dst, cached: false }); + } + + fn add_cached_edge(&mut self, src: NodeHandle, dst: NodeHandle) { + self.edges.insert(Edge { src, dst, cached: true }); + } + + fn get_handle_by_key(&self, key: &str) -> Option { + self.key_to_index.get(key).copied() + } + + fn render(&self, path: &str) -> std::io::Result<()> { + use std::io::Write; + + let mut file = BufWriter::new(std::fs::File::create(path)?); + writeln!(file, "digraph bootstrap_steps {{")?; + for (index, node) in self.nodes.iter().enumerate() { + writeln!( + file, + r#"{index} [label="{}", tooltip="{}"]"#, + escape(&node.label), + escape(&node.tooltip) + )?; + } + + let mut edges: Vec<&Edge> = self.edges.iter().collect(); + edges.sort(); + for edge in edges { + let style = if edge.cached { "dashed" } else { "solid" }; + writeln!(file, r#"{} -> {} [style="{style}"]"#, edge.src.0, edge.dst.0)?; + } + + writeln!(file, "}}") + } +} + +fn render_step(step: &dyn Debug) -> String { + format!("{step:?}") +} + +/// 
Normalizes the string so that it can be rendered into a DOT file.
+fn escape(input: &str) -> String {
+    input.replace("\"", "\\\"")
+}
diff --git a/src/doc/rustc-dev-guide/src/building/bootstrapping/debugging-bootstrap.md b/src/doc/rustc-dev-guide/src/building/bootstrapping/debugging-bootstrap.md
index c9c0d64a604e2..9c5ebbd36c465 100644
--- a/src/doc/rustc-dev-guide/src/building/bootstrapping/debugging-bootstrap.md
+++ b/src/doc/rustc-dev-guide/src/building/bootstrapping/debugging-bootstrap.md
@@ -123,6 +123,12 @@ if [#96176][cleanup-compiler-for] is resolved.
 
 [cleanup-compiler-for]: https://github.com/rust-lang/rust/issues/96176
 
+### Rendering step graph
+
+When you run bootstrap with the `BOOTSTRAP_TRACING` environment variable configured, bootstrap will automatically output a DOT file that shows all executed steps and their dependencies. The files will have the prefix `bootstrap-steps`. You can use e.g. `xdot` to visualize the file, or `dot -Tsvg` to convert the DOT file to an SVG file.
+
+A separate DOT file is written for dry-run and for non-dry-run execution.
+
 ### Using `tracing` in bootstrap
 
 Both `tracing::*` macros and the `tracing::instrument` proc-macro attribute need to be gated behind `tracing` feature. Examples:

From 2f4b40fe4ebd61943ed10f25e6406be6ad68f18e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?=
Date: Fri, 1 Aug 2025 15:14:58 +0200
Subject: [PATCH 112/118] Do not render both cached and uncached edge between two steps

---
 src/bootstrap/src/utils/step_graph.rs | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/bootstrap/src/utils/step_graph.rs b/src/bootstrap/src/utils/step_graph.rs
index b1db9e61fda7d..c45825a42223a 100644
--- a/src/bootstrap/src/utils/step_graph.rs
+++ b/src/bootstrap/src/utils/step_graph.rs
@@ -100,10 +100,12 @@ struct Node {
 #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
 struct NodeHandle(usize);
 
+/// Represents a dependency between two bootstrap steps.
 #[derive(PartialEq, Eq, Hash, PartialOrd, Ord)]
 struct Edge {
     src: NodeHandle,
     dst: NodeHandle,
+    // Was the corresponding execution of a step cached, or was the step actually executed?
     cached: bool,
 }
 
@@ -134,7 +136,11 @@ impl DotGraph {
     }
 
     fn add_cached_edge(&mut self, src: NodeHandle, dst: NodeHandle) {
-        self.edges.insert(Edge { src, dst, cached: true });
+        // There's no point in rendering both a cached and an uncached edge
+        let uncached = Edge { src, dst, cached: false };
+        if !self.edges.contains(&uncached) {
+            self.edges.insert(Edge { src, dst, cached: true });
+        }
     }
 
     fn get_handle_by_key(&self, key: &str) -> Option {

From 4f94bbf13d21c6707510ca4faa2cf90683fd1369 Mon Sep 17 00:00:00 2001
From: Waffle Lapkin
Date: Sat, 2 Aug 2025 10:49:47 +0200
Subject: [PATCH 113/118] drive-by cleanup: fix outdated documentation

---
 compiler/rustc_middle/src/ty/sty.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs
index 806079788615b..72474a6056696 100644
--- a/compiler/rustc_middle/src/ty/sty.rs
+++ b/compiler/rustc_middle/src/ty/sty.rs
@@ -1456,7 +1456,7 @@ impl<'tcx> Ty<'tcx> {
         }
     }
 
-    /// Returns the type and mutability of `*ty`.
+    /// Returns the type of `*ty`.
     ///
     /// The parameter `explicit` indicates if this is an *explicit* dereference.
     /// Some types -- notably raw ptrs -- can only be dereferenced explicitly.
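An aside on the `add_cached_edge` change in PATCH 112 above: because `Edge` derives `Eq` and `Hash` and includes the `cached` flag, the same step dependency could otherwise appear in the rendered graph twice, once as a solid edge and once as a dashed one. The following is a minimal, standalone Rust sketch of that deduplication; `NodeHandle`, `Edge`, and `DotGraph` here are simplified stand-ins for the private types in `src/bootstrap/src/utils/step_graph.rs`, not the actual bootstrap API.

use std::collections::HashSet;

// Simplified stand-ins for the private bootstrap types.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct NodeHandle(usize);

#[derive(PartialEq, Eq, Hash)]
struct Edge {
    src: NodeHandle,
    dst: NodeHandle,
    cached: bool,
}

#[derive(Default)]
struct DotGraph {
    edges: HashSet<Edge>,
}

impl DotGraph {
    // A real (uncached) execution always records a solid edge.
    fn add_edge(&mut self, src: NodeHandle, dst: NodeHandle) {
        self.edges.insert(Edge { src, dst, cached: false });
    }

    // A cache hit only records a dashed edge if no solid edge already exists
    // between the same pair of nodes, mirroring the check added in PATCH 112.
    fn add_cached_edge(&mut self, src: NodeHandle, dst: NodeHandle) {
        let uncached = Edge { src, dst, cached: false };
        if !self.edges.contains(&uncached) {
            self.edges.insert(Edge { src, dst, cached: true });
        }
    }
}

fn main() {
    let (parent, child) = (NodeHandle(0), NodeHandle(1));
    let mut graph = DotGraph::default();
    graph.add_edge(parent, child);        // first execution: solid edge
    graph.add_cached_edge(parent, child); // later cache hit: suppressed
    assert_eq!(graph.edges.len(), 1);
}

With this check in place, a dependency whose step was actually executed is drawn with a single solid edge rather than one solid and one dashed edge between the same two nodes.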
From 8b65f3d0e8991fb68b843968229a84e8e8c09e75 Mon Sep 17 00:00:00 2001 From: Waffle Lapkin Date: Sat, 2 Aug 2025 10:49:47 +0200 Subject: [PATCH 114/118] properly reject tail calls to `&FnPtr` or `&FnDef` --- .../rustc_mir_build/src/check_tail_calls.rs | 51 ++++++++++++++++++- .../explicit-tail-calls/callee_is_ref.fixed | 26 ++++++++++ tests/ui/explicit-tail-calls/callee_is_ref.rs | 26 ++++++++++ .../explicit-tail-calls/callee_is_ref.stderr | 38 ++++++++++++++ .../ui/explicit-tail-calls/callee_is_weird.rs | 29 +++++++++++ .../callee_is_weird.stderr | 26 ++++++++++ 6 files changed, 194 insertions(+), 2 deletions(-) create mode 100644 tests/ui/explicit-tail-calls/callee_is_ref.fixed create mode 100644 tests/ui/explicit-tail-calls/callee_is_ref.rs create mode 100644 tests/ui/explicit-tail-calls/callee_is_ref.stderr create mode 100644 tests/ui/explicit-tail-calls/callee_is_weird.rs create mode 100644 tests/ui/explicit-tail-calls/callee_is_weird.stderr diff --git a/compiler/rustc_mir_build/src/check_tail_calls.rs b/compiler/rustc_mir_build/src/check_tail_calls.rs index 6ed100899d8c5..b4c8b20e50f9c 100644 --- a/compiler/rustc_mir_build/src/check_tail_calls.rs +++ b/compiler/rustc_mir_build/src/check_tail_calls.rs @@ -95,9 +95,15 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { // So we have to check for them in this weird way... let parent = self.tcx.parent(did); if self.tcx.fn_trait_kind_from_def_id(parent).is_some() - && args.first().and_then(|arg| arg.as_type()).is_some_and(Ty::is_closure) + && let Some(this) = args.first() + && let Some(this) = this.as_type() { - self.report_calling_closure(&self.thir[fun], args[1].as_type().unwrap(), expr); + if this.is_closure() { + self.report_calling_closure(&self.thir[fun], args[1].as_type().unwrap(), expr); + } else { + // This can happen when tail calling `Box` that wraps a function + self.report_nonfn_callee(fn_span, self.thir[fun].span, this); + } // Tail calling is likely to cause unrelated errors (ABI, argument mismatches), // skip them, producing an error about calling a closure is enough. @@ -109,6 +115,13 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { } } + let (ty::FnDef(..) 
| ty::FnPtr(..)) = ty.kind() else { + self.report_nonfn_callee(fn_span, self.thir[fun].span, ty); + + // `fn_sig` below panics otherwise + return; + }; + // Erase regions since tail calls don't care about lifetimes let callee_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, ty.fn_sig(self.tcx)); @@ -294,6 +307,40 @@ impl<'tcx> TailCallCkVisitor<'_, 'tcx> { self.found_errors = Err(err); } + fn report_nonfn_callee(&mut self, call_sp: Span, fun_sp: Span, ty: Ty<'_>) { + let mut err = self + .tcx + .dcx() + .struct_span_err( + call_sp, + "tail calls can only be performed with function definitions or pointers", + ) + .with_note(format!("callee has type `{ty}`")); + + let mut ty = ty; + let mut refs = 0; + while ty.is_box() || ty.is_ref() { + ty = ty.builtin_deref(false).unwrap(); + refs += 1; + } + + if refs > 0 && ty.is_fn() { + let thing = if ty.is_fn_ptr() { "pointer" } else { "definition" }; + + let derefs = + std::iter::once('(').chain(std::iter::repeat_n('*', refs)).collect::(); + + err.multipart_suggestion( + format!("consider dereferencing the expression to get a function {thing}"), + vec![(fun_sp.shrink_to_lo(), derefs), (fun_sp.shrink_to_hi(), ")".to_owned())], + Applicability::MachineApplicable, + ); + } + + let err = err.emit(); + self.found_errors = Err(err); + } + fn report_abi_mismatch(&mut self, sp: Span, caller_abi: ExternAbi, callee_abi: ExternAbi) { let err = self .tcx diff --git a/tests/ui/explicit-tail-calls/callee_is_ref.fixed b/tests/ui/explicit-tail-calls/callee_is_ref.fixed new file mode 100644 index 0000000000000..7525e5c5df84b --- /dev/null +++ b/tests/ui/explicit-tail-calls/callee_is_ref.fixed @@ -0,0 +1,26 @@ +//@ run-rustfix +#![feature(explicit_tail_calls)] +#![expect(incomplete_features)] + +fn f() {} + +fn g() { + become (*(&f))() //~ error: tail calls can only be performed with function definitions or pointers +} + +fn h() { + let table = [f as fn()]; + if let Some(fun) = table.get(0) { + become (*fun)(); //~ error: tail calls can only be performed with function definitions or pointers + } +} + +fn i() { + become (***Box::new(&mut &f))(); //~ error: tail calls can only be performed with function definitions or pointers +} + +fn main() { + g(); + h(); + i(); +} diff --git a/tests/ui/explicit-tail-calls/callee_is_ref.rs b/tests/ui/explicit-tail-calls/callee_is_ref.rs new file mode 100644 index 0000000000000..36bf9efb95224 --- /dev/null +++ b/tests/ui/explicit-tail-calls/callee_is_ref.rs @@ -0,0 +1,26 @@ +//@ run-rustfix +#![feature(explicit_tail_calls)] +#![expect(incomplete_features)] + +fn f() {} + +fn g() { + become (&f)() //~ error: tail calls can only be performed with function definitions or pointers +} + +fn h() { + let table = [f as fn()]; + if let Some(fun) = table.get(0) { + become fun(); //~ error: tail calls can only be performed with function definitions or pointers + } +} + +fn i() { + become Box::new(&mut &f)(); //~ error: tail calls can only be performed with function definitions or pointers +} + +fn main() { + g(); + h(); + i(); +} diff --git a/tests/ui/explicit-tail-calls/callee_is_ref.stderr b/tests/ui/explicit-tail-calls/callee_is_ref.stderr new file mode 100644 index 0000000000000..4a2ff465e6820 --- /dev/null +++ b/tests/ui/explicit-tail-calls/callee_is_ref.stderr @@ -0,0 +1,38 @@ +error: tail calls can only be performed with function definitions or pointers + --> $DIR/callee_is_ref.rs:8:12 + | +LL | become (&f)() + | ^^^^^^ + | + = note: callee has type `&fn() {f}` +help: consider dereferencing the expression to get a 
function definition + | +LL | become (*(&f))() + | ++ + + +error: tail calls can only be performed with function definitions or pointers + --> $DIR/callee_is_ref.rs:14:16 + | +LL | become fun(); + | ^^^^^ + | + = note: callee has type `&fn()` +help: consider dereferencing the expression to get a function pointer + | +LL | become (*fun)(); + | ++ + + +error: tail calls can only be performed with function definitions or pointers + --> $DIR/callee_is_ref.rs:19:12 + | +LL | become Box::new(&mut &f)(); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: callee has type `Box<&mut &fn() {f}>` +help: consider dereferencing the expression to get a function definition + | +LL | become (***Box::new(&mut &f))(); + | ++++ + + +error: aborting due to 3 previous errors + diff --git a/tests/ui/explicit-tail-calls/callee_is_weird.rs b/tests/ui/explicit-tail-calls/callee_is_weird.rs new file mode 100644 index 0000000000000..b3ca878c232c2 --- /dev/null +++ b/tests/ui/explicit-tail-calls/callee_is_weird.rs @@ -0,0 +1,29 @@ +#![feature(explicit_tail_calls, exclusive_wrapper, fn_traits, unboxed_closures)] +#![expect(incomplete_features)] + +fn f() {} + +fn g() { + become std::sync::Exclusive::new(f)() //~ error: tail calls can only be performed with function definitions or pointers +} + +fn h() { + become (&mut &std::sync::Exclusive::new(f))() //~ error: tail calls can only be performed with function definitions or pointers +} + +fn i() { + struct J; + + impl FnOnce<()> for J { + type Output = (); + extern "rust-call" fn call_once(self, (): ()) -> Self::Output {} + } + + become J(); //~ error: tail calls can only be performed with function definitions or pointers +} + +fn main() { + g(); + h(); + i(); +} diff --git a/tests/ui/explicit-tail-calls/callee_is_weird.stderr b/tests/ui/explicit-tail-calls/callee_is_weird.stderr new file mode 100644 index 0000000000000..a4e5a38ce3320 --- /dev/null +++ b/tests/ui/explicit-tail-calls/callee_is_weird.stderr @@ -0,0 +1,26 @@ +error: tail calls can only be performed with function definitions or pointers + --> $DIR/callee_is_weird.rs:7:12 + | +LL | become std::sync::Exclusive::new(f)() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: callee has type `Exclusive` + +error: tail calls can only be performed with function definitions or pointers + --> $DIR/callee_is_weird.rs:11:12 + | +LL | become (&mut &std::sync::Exclusive::new(f))() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: callee has type `Exclusive` + +error: tail calls can only be performed with function definitions or pointers + --> $DIR/callee_is_weird.rs:22:12 + | +LL | become J(); + | ^^^ + | + = note: callee has type `J` + +error: aborting due to 3 previous errors + From e3ed3e0f1c8156b0958c7653f001a8d99d5990f6 Mon Sep 17 00:00:00 2001 From: Waffle Lapkin Date: Mon, 4 Aug 2025 09:41:08 +0200 Subject: [PATCH 115/118] small refactor of `InterpResult` - don't need type alias to default type argument - `Residual` impl allows to use more std APIs (like `<[T; N]>::try_map`) --- compiler/rustc_middle/src/lib.rs | 1 + .../rustc_middle/src/mir/interpret/error.rs | 41 ++++++++++--------- 2 files changed, 22 insertions(+), 20 deletions(-) diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index 803b645c8f76d..e5cc23c213d22 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -57,6 +57,7 @@ #![feature(sized_hierarchy)] #![feature(try_blocks)] #![feature(try_trait_v2)] +#![feature(try_trait_v2_residual)] #![feature(try_trait_v2_yeet)] 
#![feature(type_alias_impl_trait)] #![feature(yeet_expr)] diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs index 3e895c6b28095..77427a12d1182 100644 --- a/compiler/rustc_middle/src/mir/interpret/error.rs +++ b/compiler/rustc_middle/src/mir/interpret/error.rs @@ -793,36 +793,37 @@ impl Drop for Guard { /// We also make things panic if this type is ever implicitly dropped. #[derive(Debug)] #[must_use] -pub struct InterpResult_<'tcx, T> { +pub struct InterpResult<'tcx, T = ()> { res: Result>, guard: Guard, } -// Type alias to be able to set a default type argument. -pub type InterpResult<'tcx, T = ()> = InterpResult_<'tcx, T>; - -impl<'tcx, T> ops::Try for InterpResult_<'tcx, T> { +impl<'tcx, T> ops::Try for InterpResult<'tcx, T> { type Output = T; - type Residual = InterpResult_<'tcx, convert::Infallible>; + type Residual = InterpResult<'tcx, convert::Infallible>; #[inline] fn from_output(output: Self::Output) -> Self { - InterpResult_::new(Ok(output)) + InterpResult::new(Ok(output)) } #[inline] fn branch(self) -> ops::ControlFlow { match self.disarm() { Ok(v) => ops::ControlFlow::Continue(v), - Err(e) => ops::ControlFlow::Break(InterpResult_::new(Err(e))), + Err(e) => ops::ControlFlow::Break(InterpResult::new(Err(e))), } } } -impl<'tcx, T> ops::FromResidual for InterpResult_<'tcx, T> { +impl<'tcx, T> ops::Residual for InterpResult<'tcx, convert::Infallible> { + type TryType = InterpResult<'tcx, T>; +} + +impl<'tcx, T> ops::FromResidual for InterpResult<'tcx, T> { #[inline] #[track_caller] - fn from_residual(residual: InterpResult_<'tcx, convert::Infallible>) -> Self { + fn from_residual(residual: InterpResult<'tcx, convert::Infallible>) -> Self { match residual.disarm() { Err(e) => Self::new(Err(e)), } @@ -830,7 +831,7 @@ impl<'tcx, T> ops::FromResidual for InterpResult_<'tcx, T> { } // Allow `yeet`ing `InterpError` in functions returning `InterpResult_`. -impl<'tcx, T> ops::FromResidual>> for InterpResult_<'tcx, T> { +impl<'tcx, T> ops::FromResidual>> for InterpResult<'tcx, T> { #[inline] fn from_residual(ops::Yeet(e): ops::Yeet>) -> Self { Self::new(Err(e.into())) @@ -840,7 +841,7 @@ impl<'tcx, T> ops::FromResidual>> for InterpResu // Allow `?` on `Result<_, InterpError>` in functions returning `InterpResult_`. // This is useful e.g. for `option.ok_or_else(|| err_ub!(...))`. 
impl<'tcx, T, E: Into>> ops::FromResidual> - for InterpResult_<'tcx, T> + for InterpResult<'tcx, T> { #[inline] fn from_residual(residual: Result) -> Self { @@ -863,7 +864,7 @@ impl<'tcx, T, V: FromIterator> FromIterator> for Interp } } -impl<'tcx, T> InterpResult_<'tcx, T> { +impl<'tcx, T> InterpResult<'tcx, T> { #[inline(always)] fn new(res: Result>) -> Self { Self { res, guard: Guard } @@ -890,7 +891,7 @@ impl<'tcx, T> InterpResult_<'tcx, T> { #[inline] pub fn map(self, f: impl FnOnce(T) -> U) -> InterpResult<'tcx, U> { - InterpResult_::new(self.disarm().map(f)) + InterpResult::new(self.disarm().map(f)) } #[inline] @@ -898,7 +899,7 @@ impl<'tcx, T> InterpResult_<'tcx, T> { self, f: impl FnOnce(InterpErrorInfo<'tcx>) -> InterpErrorInfo<'tcx>, ) -> InterpResult<'tcx, T> { - InterpResult_::new(self.disarm().map_err(f)) + InterpResult::new(self.disarm().map_err(f)) } #[inline] @@ -906,7 +907,7 @@ impl<'tcx, T> InterpResult_<'tcx, T> { self, f: impl FnOnce(InterpErrorKind<'tcx>) -> InterpErrorKind<'tcx>, ) -> InterpResult<'tcx, T> { - InterpResult_::new(self.disarm().map_err(|mut e| { + InterpResult::new(self.disarm().map_err(|mut e| { e.0.kind = f(e.0.kind); e })) @@ -914,7 +915,7 @@ impl<'tcx, T> InterpResult_<'tcx, T> { #[inline] pub fn inspect_err_kind(self, f: impl FnOnce(&InterpErrorKind<'tcx>)) -> InterpResult<'tcx, T> { - InterpResult_::new(self.disarm().inspect_err(|e| f(&e.0.kind))) + InterpResult::new(self.disarm().inspect_err(|e| f(&e.0.kind))) } #[inline] @@ -937,7 +938,7 @@ impl<'tcx, T> InterpResult_<'tcx, T> { #[inline] pub fn and_then(self, f: impl FnOnce(T) -> InterpResult<'tcx, U>) -> InterpResult<'tcx, U> { - InterpResult_::new(self.disarm().and_then(|t| f(t).disarm())) + InterpResult::new(self.disarm().and_then(|t| f(t).disarm())) } /// Returns success if both `self` and `other` succeed, while ensuring we don't @@ -952,7 +953,7 @@ impl<'tcx, T> InterpResult_<'tcx, T> { // Discard the other error. drop(other.disarm()); // Return `self`. - InterpResult_::new(Err(e)) + InterpResult::new(Err(e)) } } } @@ -960,5 +961,5 @@ impl<'tcx, T> InterpResult_<'tcx, T> { #[inline(always)] pub fn interp_ok<'tcx, T>(x: T) -> InterpResult<'tcx, T> { - InterpResult_::new(Ok(x)) + InterpResult::new(Ok(x)) } From cf7b67420b9927c6154b43bab17917183d931f93 Mon Sep 17 00:00:00 2001 From: Waffle Lapkin Date: Mon, 4 Aug 2025 09:41:08 +0200 Subject: [PATCH 116/118] add `project_fields` helper function --- .../rustc_const_eval/src/interpret/projection.rs | 9 +++++++++ .../rustc_const_eval/src/interpret/visitor.rs | 15 +++++++-------- compiler/rustc_const_eval/src/lib.rs | 1 + .../rustc_const_eval/src/util/caller_location.rs | 10 +++++----- 4 files changed, 22 insertions(+), 13 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/projection.rs b/compiler/rustc_const_eval/src/interpret/projection.rs index f72c441808140..d05871bfc773c 100644 --- a/compiler/rustc_const_eval/src/interpret/projection.rs +++ b/compiler/rustc_const_eval/src/interpret/projection.rs @@ -199,6 +199,15 @@ where base.offset_with_meta(offset, OffsetMode::Inbounds, meta, field_layout, self) } + /// Projects multiple fields at once. See [`Self::project_field`] for details. + pub fn project_fields, const N: usize>( + &self, + base: &P, + fields: [FieldIdx; N], + ) -> InterpResult<'tcx, [P; N]> { + fields.try_map(|field| self.project_field(base, field)) + } + /// Downcasting to an enum variant. 
pub fn project_downcast>( &self, diff --git a/compiler/rustc_const_eval/src/interpret/visitor.rs b/compiler/rustc_const_eval/src/interpret/visitor.rs index a27b664613159..82c50fac6c0ee 100644 --- a/compiler/rustc_const_eval/src/interpret/visitor.rs +++ b/compiler/rustc_const_eval/src/interpret/visitor.rs @@ -121,25 +121,24 @@ pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized { // `Box` has two fields: the pointer we care about, and the allocator. assert_eq!(v.layout().fields.count(), 2, "`Box` must have exactly 2 fields"); - let (unique_ptr, alloc) = ( - self.ecx().project_field(v, FieldIdx::ZERO)?, - self.ecx().project_field(v, FieldIdx::ONE)?, - ); + let [unique_ptr, alloc] = + self.ecx().project_fields(v, [FieldIdx::ZERO, FieldIdx::ONE])?; + // Unfortunately there is some type junk in the way here: `unique_ptr` is a `Unique`... // (which means another 2 fields, the second of which is a `PhantomData`) assert_eq!(unique_ptr.layout().fields.count(), 2); - let (nonnull_ptr, phantom) = ( - self.ecx().project_field(&unique_ptr, FieldIdx::ZERO)?, - self.ecx().project_field(&unique_ptr, FieldIdx::ONE)?, - ); + let [nonnull_ptr, phantom] = + self.ecx().project_fields(&unique_ptr, [FieldIdx::ZERO, FieldIdx::ONE])?; assert!( phantom.layout().ty.ty_adt_def().is_some_and(|adt| adt.is_phantom_data()), "2nd field of `Unique` should be PhantomData but is {:?}", phantom.layout().ty, ); + // ... that contains a `NonNull`... (gladly, only a single field here) assert_eq!(nonnull_ptr.layout().fields.count(), 1); let raw_ptr = self.ecx().project_field(&nonnull_ptr, FieldIdx::ZERO)?; // the actual raw ptr + // ... whose only field finally is a raw ptr we can dereference. self.visit_box(ty, &raw_ptr)?; diff --git a/compiler/rustc_const_eval/src/lib.rs b/compiler/rustc_const_eval/src/lib.rs index bf7a79dcb20f0..3840cdf757507 100644 --- a/compiler/rustc_const_eval/src/lib.rs +++ b/compiler/rustc_const_eval/src/lib.rs @@ -2,6 +2,7 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![doc(rust_logo)] +#![feature(array_try_map)] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(decl_macro)] diff --git a/compiler/rustc_const_eval/src/util/caller_location.rs b/compiler/rustc_const_eval/src/util/caller_location.rs index c437934eaabe3..5249b32eca469 100644 --- a/compiler/rustc_const_eval/src/util/caller_location.rs +++ b/compiler/rustc_const_eval/src/util/caller_location.rs @@ -42,12 +42,12 @@ fn alloc_caller_location<'tcx>( let location = ecx.allocate(loc_layout, MemoryKind::CallerLocation).unwrap(); // Initialize fields. 
- ecx.write_immediate(filename, &ecx.project_field(&location, FieldIdx::from_u32(0)).unwrap()) - .expect("writing to memory we just allocated cannot fail"); - ecx.write_scalar(line, &ecx.project_field(&location, FieldIdx::from_u32(1)).unwrap()) - .expect("writing to memory we just allocated cannot fail"); - ecx.write_scalar(col, &ecx.project_field(&location, FieldIdx::from_u32(2)).unwrap()) + let [filename_field, line_field, col_field] = + ecx.project_fields(&location, [0, 1, 2].map(FieldIdx::from_u32)).unwrap(); + ecx.write_immediate(filename, &filename_field) .expect("writing to memory we just allocated cannot fail"); + ecx.write_scalar(line, &line_field).expect("writing to memory we just allocated cannot fail"); + ecx.write_scalar(col, &col_field).expect("writing to memory we just allocated cannot fail"); location } From 51f60d113980889e11e094199e047d99e669e6aa Mon Sep 17 00:00:00 2001 From: Jieyou Xu Date: Mon, 4 Aug 2025 17:27:25 +0800 Subject: [PATCH 117/118] Remove `tcp-stress.rs` test This stress test was originally introduced in 65cca4bd3fa0abe1000662014b3e3ea1420728f5 to detect a UAF in libuv (see RUST-12823), but we no longer use libuv, so remove this test as it was causing flaky timeout failures. See RUST-144878 for discussion. --- tests/ui/threads-sendsync/tcp-stress.rs | 64 ------------------------- 1 file changed, 64 deletions(-) delete mode 100644 tests/ui/threads-sendsync/tcp-stress.rs diff --git a/tests/ui/threads-sendsync/tcp-stress.rs b/tests/ui/threads-sendsync/tcp-stress.rs deleted file mode 100644 index b2f76a55fb976..0000000000000 --- a/tests/ui/threads-sendsync/tcp-stress.rs +++ /dev/null @@ -1,64 +0,0 @@ -//@ run-pass -//@ ignore-android needs extra network permissions -//@ needs-threads -//@ ignore-netbsd system ulimit (Too many open files) -//@ ignore-openbsd system ulimit (Too many open files) - -use std::io::prelude::*; -use std::net::{TcpListener, TcpStream}; -use std::process; -use std::sync::mpsc::channel; -use std::thread::{self, Builder}; -use std::time::Duration; - -const TARGET_CNT: usize = 200; - -fn main() { - // This test has a chance to time out, try to not let it time out - thread::spawn(move || -> () { - thread::sleep(Duration::from_secs(30)); - process::exit(1); - }); - - let listener = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = listener.local_addr().unwrap(); - thread::spawn(move || -> () { - loop { - let mut stream = match listener.accept() { - Ok(stream) => stream.0, - Err(_) => continue, - }; - let _ = stream.read(&mut [0]); - let _ = stream.write(&[2]); - } - }); - - let (tx, rx) = channel(); - - let mut spawned_cnt = 0; - for _ in 0..TARGET_CNT { - let tx = tx.clone(); - let res = Builder::new().stack_size(64 * 1024).spawn(move || { - match TcpStream::connect(addr) { - Ok(mut stream) => { - let _ = stream.write(&[1]); - let _ = stream.read(&mut [0]); - } - Err(..) => {} - } - tx.send(()).unwrap(); - }); - if let Ok(_) = res { - spawned_cnt += 1; - }; - } - - // Wait for all clients to exit, but don't wait for the server to exit. The - // server just runs infinitely. 
- drop(tx); - for _ in 0..spawned_cnt { - rx.recv().unwrap(); - } - assert_eq!(spawned_cnt, TARGET_CNT); - process::exit(0); -} From 878acaa79531f68efe43edf0d788b6e0f881bc18 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Mon, 4 Aug 2025 18:35:32 +0000 Subject: [PATCH 118/118] Dont print arg span in MIR dump for tail call --- compiler/rustc_middle/src/mir/pretty.rs | 6 +++--- .../building/custom/terminators.tail_call.built.after.mir | 2 +- ...il_call_drops.f_with_arg.ElaborateDrops.panic-abort.diff | 2 +- ...l_call_drops.f_with_arg.ElaborateDrops.panic-unwind.diff | 2 +- .../tail_call_drops.f_with_arg.built.after.panic-abort.mir | 2 +- .../tail_call_drops.f_with_arg.built.after.panic-unwind.mir | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 809cdb329f79e..d440d6852c909 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -970,11 +970,11 @@ impl<'tcx> TerminatorKind<'tcx> { Call { func, args, destination, .. } => { write!(fmt, "{destination:?} = ")?; write!(fmt, "{func:?}(")?; - for (index, arg) in args.iter().map(|a| &a.node).enumerate() { + for (index, arg) in args.iter().enumerate() { if index > 0 { write!(fmt, ", ")?; } - write!(fmt, "{arg:?}")?; + write!(fmt, "{:?}", arg.node)?; } write!(fmt, ")") } @@ -984,7 +984,7 @@ impl<'tcx> TerminatorKind<'tcx> { if index > 0 { write!(fmt, ", ")?; } - write!(fmt, "{:?}", arg)?; + write!(fmt, "{:?}", arg.node)?; } write!(fmt, ")") } diff --git a/tests/mir-opt/building/custom/terminators.tail_call.built.after.mir b/tests/mir-opt/building/custom/terminators.tail_call.built.after.mir index ab3925dae1c3a..feec68d3b0d17 100644 --- a/tests/mir-opt/building/custom/terminators.tail_call.built.after.mir +++ b/tests/mir-opt/building/custom/terminators.tail_call.built.after.mir @@ -6,6 +6,6 @@ fn tail_call(_1: i32) -> i32 { bb0: { _2 = Add(copy _1, const 42_i32); - tailcall ident::(Spanned { node: copy _2, span: $DIR/terminators.rs:32:28: 32:29 (#0) }); + tailcall ident::(copy _2); } } diff --git a/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-abort.diff b/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-abort.diff index a8c57d2cfe009..4fba0032729e6 100644 --- a/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-abort.diff +++ b/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-abort.diff @@ -93,7 +93,7 @@ } bb11: { - tailcall g_with_arg(Spanned { node: move _10, span: $DIR/tail_call_drops.rs:36:23: 36:36 (#0) }, Spanned { node: move _11, span: $DIR/tail_call_drops.rs:36:38: 36:51 (#0) }); + tailcall g_with_arg(move _10, move _11); } bb12 (cleanup): { diff --git a/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-unwind.diff b/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-unwind.diff index a8c57d2cfe009..4fba0032729e6 100644 --- a/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-unwind.diff +++ b/tests/mir-opt/tail_call_drops.f_with_arg.ElaborateDrops.panic-unwind.diff @@ -93,7 +93,7 @@ } bb11: { - tailcall g_with_arg(Spanned { node: move _10, span: $DIR/tail_call_drops.rs:36:23: 36:36 (#0) }, Spanned { node: move _11, span: $DIR/tail_call_drops.rs:36:38: 36:51 (#0) }); + tailcall g_with_arg(move _10, move _11); } bb12 (cleanup): { diff --git a/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-abort.mir b/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-abort.mir index 
f89b98a320536..9ec358ec18930 100644 --- a/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-abort.mir +++ b/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-abort.mir @@ -90,7 +90,7 @@ fn f_with_arg(_1: String, _2: String) -> () { } bb11: { - tailcall g_with_arg(Spanned { node: move _10, span: $DIR/tail_call_drops.rs:36:23: 36:36 (#0) }, Spanned { node: move _11, span: $DIR/tail_call_drops.rs:36:38: 36:51 (#0) }); + tailcall g_with_arg(move _10, move _11); } bb12: { diff --git a/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-unwind.mir b/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-unwind.mir index f89b98a320536..9ec358ec18930 100644 --- a/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-unwind.mir +++ b/tests/mir-opt/tail_call_drops.f_with_arg.built.after.panic-unwind.mir @@ -90,7 +90,7 @@ fn f_with_arg(_1: String, _2: String) -> () { } bb11: { - tailcall g_with_arg(Spanned { node: move _10, span: $DIR/tail_call_drops.rs:36:23: 36:36 (#0) }, Spanned { node: move _11, span: $DIR/tail_call_drops.rs:36:38: 36:51 (#0) }); + tailcall g_with_arg(move _10, move _11); } bb12: {
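A closing note on PATCH 118 above: MIR call and tail-call arguments are stored as `Spanned` values, and the old pretty-printer debug-printed the whole wrapper, which is why the expected MIR dumps used to contain `Spanned { node: ..., span: ... }`. The sketch below is a minimal standalone model of the before/after formatting; `Span`, `Spanned`, and `Operand` are hypothetical stand-ins, not the real rustc types.

use std::fmt;

// Stand-ins for rustc's span-carrying argument wrapper and an MIR operand.
#[derive(Debug)]
struct Span(u32, u32);

#[derive(Debug)]
struct Spanned<T> {
    node: T,
    span: Span,
}

struct Operand(&'static str);

impl fmt::Debug for Operand {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    let arg = Spanned { node: Operand("move _10"), span: Span(23, 36) };

    // Old behaviour: the whole wrapper is debug-printed, span and all.
    println!("tailcall g_with_arg({:?})", arg);
    // => tailcall g_with_arg(Spanned { node: move _10, span: Span(23, 36) })

    // New behaviour: only the operand itself is printed.
    println!("tailcall g_with_arg({:?})", arg.node);
    // => tailcall g_with_arg(move _10)
}

Printing only `arg.node` keeps the dumps focused on the operands themselves, which is the change reflected in the updated `tests/mir-opt` expectations above.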