From 72614386f74e3ed8c5907d030ab3d4582acb5c96 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Sun, 3 Aug 2025 19:21:10 -0700 Subject: [PATCH 1/4] Add a MIR test for integer methods in debug --- ...t_and_add.PreCodegen.after.panic-abort.mir | 19 +++++++++++++++++++ ..._and_add.PreCodegen.after.panic-unwind.mir | 19 +++++++++++++++++++ .../pre-codegen/integer_methods_debug.rs | 12 ++++++++++++ 3 files changed, 50 insertions(+) create mode 100644 tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir create mode 100644 tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir create mode 100644 tests/mir-opt/pre-codegen/integer_methods_debug.rs diff --git a/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir new file mode 100644 index 0000000000000..87a033f693caf --- /dev/null +++ b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir @@ -0,0 +1,19 @@ +// MIR for `cast_and_add` after PreCodegen + +fn cast_and_add(_1: i32) -> u32 { + debug x => _1; + let mut _0: u32; + let mut _2: u32; + + bb0: { + _2 = core::num::::cast_unsigned(copy _1) -> [return: bb1, unwind unreachable]; + } + + bb1: { + _0 = core::num::::wrapping_add(move _2, const 42_u32) -> [return: bb2, unwind unreachable]; + } + + bb2: { + return; + } +} diff --git a/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir new file mode 100644 index 0000000000000..ca5a66668f48e --- /dev/null +++ b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir @@ -0,0 +1,19 @@ +// MIR for `cast_and_add` after PreCodegen + +fn cast_and_add(_1: i32) -> u32 { + debug x => _1; + let mut _0: u32; 
+ let mut _2: u32; + + bb0: { + _2 = core::num::::cast_unsigned(copy _1) -> [return: bb1, unwind continue]; + } + + bb1: { + _0 = core::num::::wrapping_add(move _2, const 42_u32) -> [return: bb2, unwind continue]; + } + + bb2: { + return; + } +} diff --git a/tests/mir-opt/pre-codegen/integer_methods_debug.rs b/tests/mir-opt/pre-codegen/integer_methods_debug.rs new file mode 100644 index 0000000000000..4ec125cca9ef8 --- /dev/null +++ b/tests/mir-opt/pre-codegen/integer_methods_debug.rs @@ -0,0 +1,12 @@ +//@ compile-flags: -Copt-level=0 -Zmir-opt-level=1 -Cdebuginfo=limited +// EMIT_MIR_FOR_EACH_PANIC_STRATEGY + +#![crate_type = "lib"] + +// EMIT_MIR integer_methods_debug.cast_and_add.PreCodegen.after.mir +pub fn cast_and_add(x: i32) -> u32 { + // CHECK-LABEL: fn cast_and_add(_1: i32) -> u32 + // CHECK: _2 = {{.+}}::cast_unsigned(copy _1) + // CHECK: _0 = {{.+}}::wrapping_add(move _2, const 42_u32) -> + x.cast_unsigned().wrapping_add(42) +} From 7344962401dfac8c10e580c91a7aa67f3b7b59c6 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Fri, 25 Jul 2025 20:15:29 -0700 Subject: [PATCH 2/4] Add `#[rustc_early_inline]` --- .../src/attributes/inline.rs | 13 ++++++ compiler/rustc_attr_parsing/src/context.rs | 3 +- compiler/rustc_codegen_gcc/src/attributes.rs | 2 +- compiler/rustc_codegen_llvm/src/attributes.rs | 4 +- .../rustc_codegen_ssa/src/codegen_attrs.rs | 2 +- compiler/rustc_feature/src/builtin_attrs.rs | 5 +++ .../rustc_hir/src/attrs/data_structures.rs | 15 +++++-- compiler/rustc_middle/src/mir/mono.rs | 2 +- .../src/cross_crate_inline.rs | 6 ++- compiler/rustc_mir_transform/src/inline.rs | 41 ++++++++++--------- compiler/rustc_span/src/symbol.rs | 1 + ...ne.call_early.ForceInline.panic-abort.diff | 32 +++++++++++++++ ...e.call_early.ForceInline.panic-unwind.diff | 32 +++++++++++++++ ...e.early_as_fn.ForceInline.panic-abort.diff | 12 ++++++ ....early_as_fn.ForceInline.panic-unwind.diff | 12 ++++++ tests/mir-opt/inline/early_inline.rs | 32 +++++++++++++++ 16 files 
changed, 184 insertions(+), 30 deletions(-) create mode 100644 tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-abort.diff create mode 100644 tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-unwind.diff create mode 100644 tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-abort.diff create mode 100644 tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-unwind.diff create mode 100644 tests/mir-opt/inline/early_inline.rs diff --git a/compiler/rustc_attr_parsing/src/attributes/inline.rs b/compiler/rustc_attr_parsing/src/attributes/inline.rs index 8437713206e35..1433872a5cb64 100644 --- a/compiler/rustc_attr_parsing/src/attributes/inline.rs +++ b/compiler/rustc_attr_parsing/src/attributes/inline.rs @@ -93,3 +93,16 @@ impl SingleAttributeParser for RustcForceInlineParser { )) } } + +pub(crate) struct RustcEarlyInlineParser; + +impl SingleAttributeParser for RustcEarlyInlineParser { + const PATH: &'static [Symbol] = &[sym::rustc_early_inline]; + const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost; + const ON_DUPLICATE: OnDuplicate = OnDuplicate::WarnButFutureError; + const TEMPLATE: AttributeTemplate = template!(Word); + + fn convert(cx: &mut AcceptContext<'_, '_, S>, _args: &ArgParser<'_>) -> Option { + Some(AttributeKind::Inline(InlineAttr::Early, cx.attr_span)) + } +} diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs index c6599f20c2d1f..ca9f12461bfe7 100644 --- a/compiler/rustc_attr_parsing/src/context.rs +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -23,7 +23,7 @@ use crate::attributes::codegen_attrs::{ use crate::attributes::confusables::ConfusablesParser; use crate::attributes::deprecation::DeprecationParser; use crate::attributes::dummy::DummyParser; -use crate::attributes::inline::{InlineParser, RustcForceInlineParser}; +use crate::attributes::inline::{InlineParser, RustcEarlyInlineParser, RustcForceInlineParser}; use 
crate::attributes::link_attrs::{ ExportStableParser, FfiConstParser, FfiPureParser, LinkNameParser, LinkOrdinalParser, LinkSectionParser, StdInternalSymbolParser, @@ -169,6 +169,7 @@ attribute_parsers!( Single, Single, Single, + Single, Single, Single, Single, diff --git a/compiler/rustc_codegen_gcc/src/attributes.rs b/compiler/rustc_codegen_gcc/src/attributes.rs index 04b43bb8bb7c4..0198fe7426bfe 100644 --- a/compiler/rustc_codegen_gcc/src/attributes.rs +++ b/compiler/rustc_codegen_gcc/src/attributes.rs @@ -67,7 +67,7 @@ fn inline_attr<'gcc, 'tcx>( Some(FnAttribute::AlwaysInline) } } - InlineAttr::Hint => Some(FnAttribute::Inline), + InlineAttr::Hint | InlineAttr::Early => Some(FnAttribute::Inline), InlineAttr::Force { .. } => Some(FnAttribute::AlwaysInline), InlineAttr::Never => { if cx.sess().target.arch != "amdgpu" { diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index c548f4675834f..6328aec6fd30b 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -52,7 +52,9 @@ fn inline_attr<'ll>(cx: &CodegenCx<'ll, '_>, inline: InlineAttr) -> Option<&'ll return Some(AttributeKind::NoInline.create_attr(cx.llcx)); } match inline { - InlineAttr::Hint => Some(AttributeKind::InlineHint.create_attr(cx.llcx)), + InlineAttr::Hint | InlineAttr::Early => { + Some(AttributeKind::InlineHint.create_attr(cx.llcx)) + } InlineAttr::Always | InlineAttr::Force { .. 
} => { Some(AttributeKind::AlwaysInline.create_attr(cx.llcx)) } diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index 7f54a47327af8..1078d911a13b0 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -473,7 +473,7 @@ fn check_result( // warn that inline has no effect when no_sanitize is present if !codegen_fn_attrs.no_sanitize.is_empty() - && codegen_fn_attrs.inline.always() + && codegen_fn_attrs.inline.always_in_codegen() && let (Some(no_sanitize_span), Some(inline_span)) = (interesting_spans.no_sanitize, interesting_spans.inline) { diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 5c63d4808db2f..75f37db4d661d 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -1123,6 +1123,11 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_no_mir_inline, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::Yes, "`#[rustc_no_mir_inline]` prevents the MIR inliner from inlining a function while not affecting codegen" ), + rustc_attr!( + rustc_early_inline, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::Yes, + "`#[rustc_early_inline]` inlines non-generic calls to trivial methods even in debug, \ + while still allowing them to be codegen'd for generic calls" + ), rustc_attr!( rustc_force_inline, Normal, template!(Word, NameValueStr: "reason"), WarnFollowing, EncodeCrossCrate::Yes, "`#[rustc_force_inline]` forces a free function to be inlined" diff --git a/compiler/rustc_hir/src/attrs/data_structures.rs b/compiler/rustc_hir/src/attrs/data_structures.rs index 80618422b56d6..bd673870946c4 100644 --- a/compiler/rustc_hir/src/attrs/data_structures.rs +++ b/compiler/rustc_hir/src/attrs/data_structures.rs @@ -24,13 +24,20 @@ pub enum InlineAttr { attr_span: Span, reason: Option, }, + /// `#[rustc_early_inline]` will 
always inline calls to a known impl in MIR. + /// + /// You can think of this as either + /// - Force, but without the "do not codegen as a function ever" restriction. + /// - Always, but only for MIR. + Early, } impl InlineAttr { - pub fn always(&self) -> bool { + pub fn always_in_codegen(&self) -> bool { match self { - InlineAttr::Always | InlineAttr::Force { .. } => true, - InlineAttr::None | InlineAttr::Hint | InlineAttr::Never => false, + InlineAttr::Always => true, + InlineAttr::None | InlineAttr::Hint | InlineAttr::Early | InlineAttr::Never => false, + InlineAttr::Force { .. } => panic!("Shouldn't be codegen'ing {self:?}"), } } } @@ -342,7 +349,7 @@ pub enum AttributeKind { reason: Option, }, - /// Represents `#[inline]` and `#[rustc_force_inline]`. + /// Represents `#[inline]` and `#[rustc_force_inline]` and `#[rustc_early_inline]`. Inline(InlineAttr, Span), /// Represents `#[link_name]`. diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index e5864660575c5..bdd4ce4b98cbc 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -205,7 +205,7 @@ impl<'tcx> MonoItem<'tcx> { // To ensure that #[inline(always)] can be inlined as much as possible, especially in unoptimized // builds, we always select LocalCopy. - if codegen_fn_attrs.inline.always() { + if codegen_fn_attrs.inline.always_in_codegen() { return InstantiationMode::LocalCopy; } diff --git a/compiler/rustc_mir_transform/src/cross_crate_inline.rs b/compiler/rustc_mir_transform/src/cross_crate_inline.rs index b186c2bd7758f..de9955c803731 100644 --- a/compiler/rustc_mir_transform/src/cross_crate_inline.rs +++ b/compiler/rustc_mir_transform/src/cross_crate_inline.rs @@ -46,8 +46,10 @@ fn cross_crate_inlinable(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { // #[inline(never)] to force code generation. 
match codegen_fn_attrs.inline { InlineAttr::Never => return false, - InlineAttr::Hint | InlineAttr::Always | InlineAttr::Force { .. } => return true, - _ => {} + InlineAttr::Hint | InlineAttr::Always | InlineAttr::Early | InlineAttr::Force { .. } => { + return true; + } + InlineAttr::None => {} } // If the crate is likely to be mostly unused, use cross-crate inlining to defer codegen until diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 3d49eb4e8ef75..1fb7fa21029da 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -77,7 +77,7 @@ pub struct ForceInline; impl ForceInline { pub fn should_run_pass_for_callee<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool { - matches!(tcx.codegen_fn_attrs(def_id).inline, InlineAttr::Force { .. }) + matches!(tcx.codegen_fn_attrs(def_id).inline, InlineAttr::Force { .. } | InlineAttr::Early) } } @@ -195,7 +195,7 @@ impl<'tcx> Inliner<'tcx> for ForceInliner<'tcx> { &self, callee_attrs: &CodegenFnAttrs, ) -> Result<(), &'static str> { - debug_assert_matches!(callee_attrs.inline, InlineAttr::Force { .. }); + debug_assert_matches!(callee_attrs.inline, InlineAttr::Force { .. 
} | InlineAttr::Early); Ok(()) } @@ -247,23 +247,26 @@ impl<'tcx> Inliner<'tcx> for ForceInliner<'tcx> { fn on_inline_failure(&self, callsite: &CallSite<'tcx>, reason: &'static str) { let tcx = self.tcx(); - let InlineAttr::Force { attr_span, reason: justification } = - tcx.codegen_fn_attrs(callsite.callee.def_id()).inline - else { - bug!("called on item without required inlining"); - }; - - let call_span = callsite.source_info.span; - tcx.dcx().emit_err(crate::errors::ForceInlineFailure { - call_span, - attr_span, - caller_span: tcx.def_span(self.def_id), - caller: tcx.def_path_str(self.def_id), - callee_span: tcx.def_span(callsite.callee.def_id()), - callee: tcx.def_path_str(callsite.callee.def_id()), - reason, - justification: justification.map(|sym| crate::errors::ForceInlineJustification { sym }), - }); + match tcx.codegen_fn_attrs(callsite.callee.def_id()).inline { + InlineAttr::Early => { + // Ok, we don't actually mind if this fails. + } + InlineAttr::Force { attr_span, reason: justification } => { + let call_span = callsite.source_info.span; + tcx.dcx().emit_err(crate::errors::ForceInlineFailure { + call_span, + attr_span, + caller_span: tcx.def_span(self.def_id), + caller: tcx.def_path_str(self.def_id), + callee_span: tcx.def_span(callsite.callee.def_id()), + callee: tcx.def_path_str(callsite.callee.def_id()), + reason, + justification: justification + .map(|sym| crate::errors::ForceInlineJustification { sym }), + }); + } + _ => bug!("called on item without required inlining"), + } } } diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index d54175548e30e..b0eed1835f95a 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1848,6 +1848,7 @@ symbols! 
{ rustc_dump_predicates, rustc_dump_user_args, rustc_dump_vtable, + rustc_early_inline, rustc_effective_visibility, rustc_evaluate_where_clauses, rustc_expected_cgu_reuse, diff --git a/tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-abort.diff b/tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-abort.diff new file mode 100644 index 0000000000000..35c60a1c5a64d --- /dev/null +++ b/tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-abort.diff @@ -0,0 +1,32 @@ +- // MIR for `call_early` before ForceInline ++ // MIR for `call_early` after ForceInline + + fn call_early(_1: i32) -> i32 { + debug x => _1; + let mut _0: i32; + let mut _2: i32; ++ let mut _3: i32; ++ scope 1 (inlined do_early) { ++ let mut _4: (i32, bool); ++ } + + bb0: { + StorageLive(_2); + _2 = copy _1; +- _0 = do_early(move _2, const 42_i32) -> [return: bb1, unwind unreachable]; ++ StorageLive(_3); ++ _3 = const 42_i32; ++ StorageLive(_4); ++ _4 = AddWithOverflow(copy _2, copy _3); ++ assert(!move (_4.1: bool), "attempt to compute `{} + {}`, which would overflow", copy _2, copy _3) -> [success: bb1, unwind unreachable]; + } + + bb1: { ++ _0 = move (_4.0: i32); ++ StorageDead(_4); ++ StorageDead(_3); + StorageDead(_2); + return; + } + } + diff --git a/tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-unwind.diff b/tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-unwind.diff new file mode 100644 index 0000000000000..ece03733fb506 --- /dev/null +++ b/tests/mir-opt/inline/early_inline.call_early.ForceInline.panic-unwind.diff @@ -0,0 +1,32 @@ +- // MIR for `call_early` before ForceInline ++ // MIR for `call_early` after ForceInline + + fn call_early(_1: i32) -> i32 { + debug x => _1; + let mut _0: i32; + let mut _2: i32; ++ let mut _3: i32; ++ scope 1 (inlined do_early) { ++ let mut _4: (i32, bool); ++ } + + bb0: { + StorageLive(_2); + _2 = copy _1; +- _0 = do_early(move _2, const 42_i32) -> [return: bb1, unwind continue]; ++ StorageLive(_3); 
++ _3 = const 42_i32; ++ StorageLive(_4); ++ _4 = AddWithOverflow(copy _2, copy _3); ++ assert(!move (_4.1: bool), "attempt to compute `{} + {}`, which would overflow", copy _2, copy _3) -> [success: bb1, unwind continue]; + } + + bb1: { ++ _0 = move (_4.0: i32); ++ StorageDead(_4); ++ StorageDead(_3); + StorageDead(_2); + return; + } + } + diff --git a/tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-abort.diff b/tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-abort.diff new file mode 100644 index 0000000000000..731df96de43ce --- /dev/null +++ b/tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-abort.diff @@ -0,0 +1,12 @@ +- // MIR for `early_as_fn` before ForceInline ++ // MIR for `early_as_fn` after ForceInline + + fn early_as_fn() -> fn(i32, i32) -> i32 { + let mut _0: fn(i32, i32) -> i32; + + bb0: { + _0 = do_early as fn(i32, i32) -> i32 (PointerCoercion(ReifyFnPointer, Implicit)); + return; + } + } + diff --git a/tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-unwind.diff b/tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-unwind.diff new file mode 100644 index 0000000000000..731df96de43ce --- /dev/null +++ b/tests/mir-opt/inline/early_inline.early_as_fn.ForceInline.panic-unwind.diff @@ -0,0 +1,12 @@ +- // MIR for `early_as_fn` before ForceInline ++ // MIR for `early_as_fn` after ForceInline + + fn early_as_fn() -> fn(i32, i32) -> i32 { + let mut _0: fn(i32, i32) -> i32; + + bb0: { + _0 = do_early as fn(i32, i32) -> i32 (PointerCoercion(ReifyFnPointer, Implicit)); + return; + } + } + diff --git a/tests/mir-opt/inline/early_inline.rs b/tests/mir-opt/inline/early_inline.rs new file mode 100644 index 0000000000000..9f2932fea7a3d --- /dev/null +++ b/tests/mir-opt/inline/early_inline.rs @@ -0,0 +1,32 @@ +//@ compile-flags: -Copt-level=0 -Zmir-opt-level=1 -Cdebuginfo=limited +// EMIT_MIR_FOR_EACH_PANIC_STRATEGY + +#![feature(rustc_attrs)] + +#[rustc_early_inline] +fn do_early(x: i32, y: i32) 
-> i32 { + x + y +} + +// EMIT_MIR early_inline.early_as_fn.ForceInline.diff +fn early_as_fn() -> fn(i32, i32) -> i32 { + // CHECK-LABEL: fn early_as_fn() -> fn(i32, i32) -> i32 + // CHECK: _0 = do_early as fn(i32, i32) -> i32 (PointerCoercion(ReifyFnPointer, Implicit)); + do_early +} + +// EMIT_MIR early_inline.call_early.ForceInline.diff +fn call_early(x: i32) -> i32 { + // CHECK-LABEL: fn call_early(_1: i32) -> i32 + // CHECK: (inlined do_early) + // CHECK: _2 = const 42_i32; + // CHECK: _3 = AddWithOverflow(copy _1, copy _2); + // CHECK: _0 = move (_3.0: i32); + do_early(x, 42) +} + +fn main() { + let f = early_as_fn(); + let _z = f(1, 2); + call_early(7); +} From 096c41f53d0ec65a13849150204a6116a3817bf0 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Sun, 3 Aug 2025 19:43:31 -0700 Subject: [PATCH 3/4] Make `cast_(un)signed` and `wrapping_add` be `rustc_early_inline` (rather than `always`) Obviously there's way more that could do this, but I don't want to do *all* of them at once. --- library/core/src/num/int_macros.rs | 4 ++-- library/core/src/num/uint_macros.rs | 4 ++-- ...p_forward.PreCodegen.after.panic-abort.mir | 20 +++++++++---------- ..._forward.PreCodegen.after.panic-unwind.mir | 20 +++++++++---------- ...t_and_add.PreCodegen.after.panic-abort.mir | 12 +++++------ ..._and_add.PreCodegen.after.panic-unwind.mir | 12 +++++------ .../pre-codegen/integer_methods_debug.rs | 6 ++++-- 7 files changed, 38 insertions(+), 40 deletions(-) diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs index 5683d5ec92dc7..5dc9d97de4b3f 100644 --- a/library/core/src/num/int_macros.rs +++ b/library/core/src/num/int_macros.rs @@ -225,7 +225,7 @@ macro_rules!
int_impl { #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast_unsigned(self) -> $UnsignedT { self as $UnsignedT } @@ -1915,7 +1915,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_add(self, rhs: Self) -> Self { intrinsics::wrapping_add(self, rhs) } diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs index 584cd60fbe5cc..a70d64e6bb401 100644 --- a/library/core/src/num/uint_macros.rs +++ b/library/core/src/num/uint_macros.rs @@ -277,7 +277,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast_signed(self) -> $SignedT { self as $SignedT } @@ -2070,7 +2070,7 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_add(self, rhs: Self) -> Self { intrinsics::wrapping_add(self, rhs) } diff --git a/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-abort.mir index 83478e60b5d4e..36213435e54c6 100644 --- a/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-abort.mir @@ -8,29 +8,29 @@ fn step_forward(_1: u16, _2: usize) -> u16 { let mut _8: u16; scope 2 { } - scope 3 (inlined ::forward_checked) { - scope 4 { - scope 6 (inlined core::num::::checked_add) { + scope 3 (inlined core::num::::wrapping_add) { + } + scope 4 (inlined ::forward_checked) { + scope 5 { + scope 7 (inlined core::num::::checked_add) { let mut _5: (u16, bool); let mut _6: bool; - scope 7 (inlined std::intrinsics::unlikely) { + scope 8 (inlined std::intrinsics::unlikely) { let _7: (); } } } - scope 5 (inlined convert::num::ptr_try_from_impls:: for u16>::try_from) { + scope 6 (inlined convert::num::ptr_try_from_impls:: for u16>::try_from) { let mut _3: bool; let mut _4: u16; } } - scope 8 (inlined Option::::is_none) { - scope 9 (inlined Option::::is_some) { - scope 10 { + scope 9 (inlined Option::::is_none) { + scope 10 (inlined Option::::is_some) { + scope 11 { } } } - scope 11 (inlined core::num::::wrapping_add) { - } } bb0: { diff --git a/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-unwind.mir index ac7a6e0445191..228b3851b6199 100644 --- a/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-unwind.mir +++ 
b/tests/mir-opt/pre-codegen/checked_ops.step_forward.PreCodegen.after.panic-unwind.mir @@ -8,29 +8,29 @@ fn step_forward(_1: u16, _2: usize) -> u16 { let mut _8: u16; scope 2 { } - scope 3 (inlined ::forward_checked) { - scope 4 { - scope 6 (inlined core::num::::checked_add) { + scope 3 (inlined core::num::::wrapping_add) { + } + scope 4 (inlined ::forward_checked) { + scope 5 { + scope 7 (inlined core::num::::checked_add) { let mut _5: (u16, bool); let mut _6: bool; - scope 7 (inlined std::intrinsics::unlikely) { + scope 8 (inlined std::intrinsics::unlikely) { let _7: (); } } } - scope 5 (inlined convert::num::ptr_try_from_impls:: for u16>::try_from) { + scope 6 (inlined convert::num::ptr_try_from_impls:: for u16>::try_from) { let mut _3: bool; let mut _4: u16; } } - scope 8 (inlined Option::::is_none) { - scope 9 (inlined Option::::is_some) { - scope 10 { + scope 9 (inlined Option::::is_none) { + scope 10 (inlined Option::::is_some) { + scope 11 { } } } - scope 11 (inlined core::num::::wrapping_add) { - } } bb0: { diff --git a/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir index 87a033f693caf..afcff20fd4d06 100644 --- a/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-abort.mir @@ -4,16 +4,14 @@ fn cast_and_add(_1: i32) -> u32 { debug x => _1; let mut _0: u32; let mut _2: u32; - - bb0: { - _2 = core::num::::cast_unsigned(copy _1) -> [return: bb1, unwind unreachable]; + scope 1 (inlined core::num::::cast_unsigned) { } - - bb1: { - _0 = core::num::::wrapping_add(move _2, const 42_u32) -> [return: bb2, unwind unreachable]; + scope 2 (inlined core::num::::wrapping_add) { } - bb2: { + bb0: { + _2 = copy _1 as u32 (IntToInt); + _0 = Add(copy _2, const 42_u32); return; } } diff --git 
a/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir index ca5a66668f48e..afcff20fd4d06 100644 --- a/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/integer_methods_debug.cast_and_add.PreCodegen.after.panic-unwind.mir @@ -4,16 +4,14 @@ fn cast_and_add(_1: i32) -> u32 { debug x => _1; let mut _0: u32; let mut _2: u32; - - bb0: { - _2 = core::num::::cast_unsigned(copy _1) -> [return: bb1, unwind continue]; + scope 1 (inlined core::num::::cast_unsigned) { } - - bb1: { - _0 = core::num::::wrapping_add(move _2, const 42_u32) -> [return: bb2, unwind continue]; + scope 2 (inlined core::num::::wrapping_add) { } - bb2: { + bb0: { + _2 = copy _1 as u32 (IntToInt); + _0 = Add(copy _2, const 42_u32); return; } } diff --git a/tests/mir-opt/pre-codegen/integer_methods_debug.rs b/tests/mir-opt/pre-codegen/integer_methods_debug.rs index 4ec125cca9ef8..11f3d91a16885 100644 --- a/tests/mir-opt/pre-codegen/integer_methods_debug.rs +++ b/tests/mir-opt/pre-codegen/integer_methods_debug.rs @@ -6,7 +6,9 @@ // EMIT_MIR integer_methods_debug.cast_and_add.PreCodegen.after.mir pub fn cast_and_add(x: i32) -> u32 { // CHECK-LABEL: fn cast_and_add(_1: i32) -> u32 - // CHECK: _2 = {{.+}}::cast_unsigned(copy _1) - // CHECK: _0 = {{.+}}::wrapping_add(move _2, const 42_u32) -> + // CHECK: (inlined {{.+}}::cast_unsigned) + // CHECK: (inlined {{.+}}::wrapping_add) + // CHECK: _2 = copy _1 as u32 (IntToInt); + // CHECK: _0 = Add(copy _2, const 42_u32); x.cast_unsigned().wrapping_add(42) } From 075af0c5e07cb4f47eba25b5d98551f5de2e5eb7 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Tue, 5 Aug 2025 21:10:28 -0700 Subject: [PATCH 4/4] [PERF ONLY] Spam it all over and see how things differ --- library/alloc/src/boxed.rs | 6 +- .../alloc/src/collections/binary_heap/mod.rs | 2 +- 
library/alloc/src/collections/linked_list.rs | 4 +- library/alloc/src/ffi/c_str.rs | 2 +- library/alloc/src/rc.rs | 4 +- library/alloc/src/task.rs | 10 +- .../tests/sort/known_good_stable_sort.rs | 8 +- library/core/src/alloc/layout.rs | 2 +- library/core/src/cell.rs | 16 +- library/core/src/convert/mod.rs | 2 +- library/core/src/hint.rs | 14 +- library/core/src/intrinsics/mod.rs | 4 +- library/core/src/mem/manually_drop.rs | 4 +- library/core/src/mem/maybe_uninit.rs | 30 ++-- library/core/src/mem/mod.rs | 10 +- library/core/src/num/int_macros.rs | 72 ++++----- library/core/src/num/mod.rs | 2 +- library/core/src/num/nonzero.rs | 26 ++-- library/core/src/num/uint_macros.rs | 106 ++++++------- library/core/src/option.rs | 2 +- library/core/src/pin.rs | 24 +-- library/core/src/pin/unsafe_pinned.rs | 14 +- library/core/src/ptr/const_ptr.rs | 38 ++--- library/core/src/ptr/mod.rs | 32 ++-- library/core/src/ptr/mut_ptr.rs | 78 +++++----- library/core/src/ptr/non_null.rs | 44 +++--- library/core/src/result.rs | 2 +- library/core/src/slice/index.rs | 4 +- library/core/src/slice/iter/macros.rs | 6 +- library/core/src/slice/mod.rs | 4 +- library/core/src/slice/sort/shared/mod.rs | 2 +- library/core/src/slice/sort/shared/pivot.rs | 2 +- .../core/src/slice/sort/shared/smallsort.rs | 4 +- library/core/src/slice/sort/stable/drift.rs | 14 +- library/core/src/slice/sort/stable/mod.rs | 2 +- library/core/src/slice/sort/stable/tiny.rs | 2 +- .../core/src/slice/sort/unstable/heapsort.rs | 2 +- library/core/src/slice/sort/unstable/mod.rs | 2 +- library/core/src/str/mod.rs | 8 +- ...n.DataflowConstProp.32bit.panic-abort.diff | 8 +- ....DataflowConstProp.32bit.panic-unwind.diff | 8 +- ...n.DataflowConstProp.64bit.panic-abort.diff | 8 +- ....DataflowConstProp.64bit.panic-unwind.diff | 8 +- ...oxed_slice.main.GVN.32bit.panic-abort.diff | 8 +- ...xed_slice.main.GVN.32bit.panic-unwind.diff | 8 +- ...oxed_slice.main.GVN.64bit.panic-abort.diff | 8 +- 
...xed_slice.main.GVN.64bit.panic-unwind.diff | 8 +- ...tr.main.DataflowConstProp.panic-abort.diff | 12 +- ...r.main.DataflowConstProp.panic-unwind.diff | 12 +- ...ng_operand.test.GVN.32bit.panic-abort.diff | 16 +- ...g_operand.test.GVN.32bit.panic-unwind.diff | 132 ++++++++++++++-- ...ng_operand.test.GVN.64bit.panic-abort.diff | 16 +- ...g_operand.test.GVN.64bit.panic-unwind.diff | 132 ++++++++++++++-- ...vn.slice_const_length.GVN.panic-abort.diff | 11 +- ...n.slice_const_length.GVN.panic-unwind.diff | 11 +- tests/mir-opt/gvn.slices.GVN.panic-abort.diff | 62 ++++---- .../mir-opt/gvn.slices.GVN.panic-unwind.diff | 62 ++++---- .../gvn_ptr_eq_with_constant.main.GVN.diff | 20 +-- ...ine_coroutine.main.Inline.panic-abort.diff | 47 +++--- ...ne_coroutine.main.Inline.panic-unwind.diff | 73 +++++---- .../inline_shims.drop.Inline.panic-abort.diff | 14 +- tests/mir-opt/inline/unchecked_shifts.rs | 4 +- ...gned_smaller.ForceInline.panic-abort.diff} | 4 +- ...ned_smaller.ForceInline.panic-unwind.diff} | 4 +- ...igned_bigger.ForceInline.panic-abort.diff} | 4 +- ...gned_bigger.ForceInline.panic-unwind.diff} | 4 +- ...y.run2-{closure#0}.Inline.panic-abort.diff | 37 ++--- ....run2-{closure#0}.Inline.panic-unwind.diff | 34 ++--- .../issue_101973.inner.GVN.panic-abort.diff | 8 +- .../issue_101973.inner.GVN.panic-unwind.diff | 8 +- ...git.PreCodegen.after.32bit.panic-abort.mir | 18 +-- ...it.PreCodegen.after.32bit.panic-unwind.mir | 18 +-- ...git.PreCodegen.after.64bit.panic-abort.mir | 18 +-- ...it.PreCodegen.after.64bit.panic-unwind.mir | 18 +-- ...ace.PreCodegen.after.32bit.panic-abort.mir | 4 +- ...ce.PreCodegen.after.32bit.panic-unwind.mir | 4 +- ...ace.PreCodegen.after.64bit.panic-abort.mir | 4 +- ...ce.PreCodegen.after.64bit.panic-unwind.mir | 4 +- ...d_constant.main.GVN.32bit.panic-abort.diff | 36 ++--- ..._constant.main.GVN.32bit.panic-unwind.diff | 81 +++++++--- ...d_constant.main.GVN.64bit.panic-abort.diff | 36 ++--- ..._constant.main.GVN.64bit.panic-unwind.diff | 81 
+++++++--- ...ked_range.PreCodegen.after.panic-abort.mir | 10 +- ...ed_range.PreCodegen.after.panic-unwind.mir | 10 +- ...ated_loop.PreCodegen.after.panic-abort.mir | 38 ++--- ...ted_loop.PreCodegen.after.panic-unwind.mir | 14 +- ...ward_loop.PreCodegen.after.panic-abort.mir | 38 ++--- ...ard_loop.PreCodegen.after.panic-unwind.mir | 38 ++--- ...erse_loop.PreCodegen.after.panic-abort.mir | 14 +- ...rse_loop.PreCodegen.after.panic-unwind.mir | 14 +- ...iter_next.PreCodegen.after.panic-abort.mir | 24 +-- ...ter_next.PreCodegen.after.panic-unwind.mir | 24 +-- ..._to_slice.PreCodegen.after.panic-abort.mir | 24 +-- ...to_slice.PreCodegen.after.panic-unwind.mir | 24 +-- ...mes.foo.ScalarReplacementOfAggregates.diff | 141 ++++++++++-------- 95 files changed, 1227 insertions(+), 909 deletions(-) rename tests/mir-opt/inline/{unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-abort.diff => unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-abort.diff} (90%) rename tests/mir-opt/inline/{unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-unwind.diff => unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-unwind.diff} (90%) rename tests/mir-opt/inline/{unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-abort.diff => unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-abort.diff} (90%) rename tests/mir-opt/inline/{unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-unwind.diff => unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-unwind.diff} (90%) diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index 3db37f1d16f3d..1c71a32c2188b 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -252,7 +252,7 @@ impl Box { /// let five = Box::new(5); /// ``` #[cfg(not(no_global_oom_handling))] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "rust1", since = "1.0.0")] #[must_use] #[rustc_diagnostic_item = "box_new"] @@ -317,7 +317,7 @@ impl Box { 
#[cfg(not(no_global_oom_handling))] #[stable(feature = "pin", since = "1.33.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub fn pin(x: T) -> Pin> { Box::new(x).into() } @@ -594,7 +594,7 @@ impl Box { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "allocator_api", issue = "32838")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub fn pin_in(x: T, alloc: A) -> Pin where A: 'static + Allocator, diff --git a/library/alloc/src/collections/binary_heap/mod.rs b/library/alloc/src/collections/binary_heap/mod.rs index 63828b482b9a9..6fa30d4a6bd49 100644 --- a/library/alloc/src/collections/binary_heap/mod.rs +++ b/library/alloc/src/collections/binary_heap/mod.rs @@ -875,7 +875,7 @@ impl BinaryHeap { let tail_len = self.len() - start; - #[inline(always)] + #[rustc_early_inline] fn log2_fast(x: usize) -> usize { (usize::BITS - x.leading_zeros() - 1) as usize } diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs index 31dfe73fc7992..b64f51d821be0 100644 --- a/library/alloc/src/collections/linked_list.rs +++ b/library/alloc/src/collections/linked_list.rs @@ -1507,7 +1507,7 @@ impl<'a, T, A: Allocator> Cursor<'a, T, A> { /// Provides a reference to the cursor's parent list. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[unstable(feature = "linked_list_cursors", issue = "58533")] pub fn as_list(&self) -> &'a LinkedList { self.list @@ -1629,7 +1629,7 @@ impl<'a, T, A: Allocator> CursorMut<'a, T, A> { /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the /// `CursorMut` is frozen for the lifetime of the reference. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[unstable(feature = "linked_list_cursors", issue = "58533")] pub fn as_list(&self) -> &LinkedList { self.list diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs index 93bdad7538007..333bdea06e8fc 100644 --- a/library/alloc/src/ffi/c_str.rs +++ b/library/alloc/src/ffi/c_str.rs @@ -269,7 +269,7 @@ impl CString { } // Specialization for avoiding reallocation - #[inline(always)] // Without that it is not inlined into specializations + #[rustc_early_inline] // Without that it is not inlined into specializations fn spec_new_impl_bytes(bytes: &[u8]) -> Result { // We cannot have such large slice that we would overflow here // but using `checked_add` allows LLVM to assume that capacity never overflows diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 5018ff4ad71f3..13fe649649ba0 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -351,7 +351,7 @@ impl Rc { } impl Rc { - #[inline(always)] + #[rustc_early_inline] fn inner(&self) -> &RcInner { // This unsafety is ok because while this Rc is alive we're guaranteed // that the inner pointer is valid. @@ -2251,7 +2251,7 @@ impl RcFromSlice for Rc<[T]> { impl Deref for Rc { type Target = T; - #[inline(always)] + #[rustc_early_inline] fn deref(&self) -> &T { &self.inner().value } diff --git a/library/alloc/src/task.rs b/library/alloc/src/task.rs index b4116f4988b64..94f84ab20dfab 100644 --- a/library/alloc/src/task.rs +++ b/library/alloc/src/task.rs @@ -133,18 +133,18 @@ impl From> for RawWaker { // trait dispatch - instead both impls call this function directly and // explicitly. #[cfg(target_has_atomic = "ptr")] -#[inline(always)] +#[rustc_early_inline] fn raw_waker(waker: Arc) -> RawWaker { // Increment the reference count of the arc to clone it. 
// - // The #[inline(always)] is to ensure that raw_waker and clone_waker are + // The #[rustc_early_inline] is to ensure that raw_waker and clone_waker are // always generated in the same code generation unit as one another, and // therefore that the structurally identical const-promoted RawWakerVTable // within both functions is deduplicated at LLVM IR code generation time. // This allows optimizing Waker::will_wake to a single pointer comparison of // the vtable pointers, rather than comparing all four function pointers // within the vtables. - #[inline(always)] + #[rustc_early_inline] unsafe fn clone_waker(waker: *const ()) -> RawWaker { unsafe { Arc::increment_strong_count(waker as *const W) }; RawWaker::new( @@ -311,13 +311,13 @@ impl From> for RawWaker { // the safety of `From> for Waker` does not depend on the correct // trait dispatch - instead both impls call this function directly and // explicitly. -#[inline(always)] +#[rustc_early_inline] fn local_raw_waker(waker: Rc) -> RawWaker { // Increment the reference count of the Rc to clone it. // // Refer to the comment on raw_waker's clone_waker regarding why this is // always inline. - #[inline(always)] + #[rustc_early_inline] unsafe fn clone_waker(waker: *const ()) -> RawWaker { unsafe { Rc::increment_strong_count(waker as *const W) }; RawWaker::new( diff --git a/library/alloctests/tests/sort/known_good_stable_sort.rs b/library/alloctests/tests/sort/known_good_stable_sort.rs index 2df891462538d..5419c4e4c12a3 100644 --- a/library/alloctests/tests/sort/known_good_stable_sort.rs +++ b/library/alloctests/tests/sort/known_good_stable_sort.rs @@ -19,12 +19,12 @@ use std::ptr; /// interior mutability will be observable. Same is true if `T: Ord` panics. /// /// Panics if allocating the auxiliary memory fails. 
-#[inline(always)] +#[rustc_early_inline] pub fn sort(v: &mut [T]) { stable_sort(v, |a, b| a.lt(b)) } -#[inline(always)] +#[rustc_early_inline] fn stable_sort bool>(v: &mut [T], mut is_less: F) { if size_of::() == 0 { return; @@ -65,7 +65,7 @@ unsafe fn mergesort_main bool>(v: &mut [T], is_less: &mut /// no run detection, etc. /// /// Buffer as pointed to by `scratch` must have space for `v.len()` writes. And must not alias `v`. -#[inline(always)] +#[rustc_early_inline] unsafe fn mergesort_core bool>( v: &mut [T], scratch_ptr: *mut T, @@ -96,7 +96,7 @@ unsafe fn mergesort_core bool>( /// /// SAFETY: The caller must ensure that `scratch_ptr` is valid for `v.len()` writes. And that mid is /// in-bounds. -#[inline(always)] +#[rustc_early_inline] unsafe fn merge(v: &mut [T], scratch_ptr: *mut T, is_less: &mut F, mid: usize) where F: FnMut(&T, &T) -> bool, diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs index 49275975f046f..885c0edc2c079 100644 --- a/library/core/src/alloc/layout.rs +++ b/library/core/src/alloc/layout.rs @@ -83,7 +83,7 @@ impl Layout { true } - #[inline(always)] + #[rustc_early_inline] const fn max_size_for_align(align: Alignment) -> usize { // (power-of-two implies align != 0.) diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs index d67408cae1b95..818cc22b4ca37 100644 --- a/library/core/src/cell.rs +++ b/library/core/src/cell.rs @@ -816,12 +816,12 @@ const fn panic_already_mutably_borrowed(err: BorrowError) -> ! 
{ type BorrowCounter = isize; const UNUSED: BorrowCounter = 0; -#[inline(always)] +#[rustc_early_inline] const fn is_writing(x: BorrowCounter) -> bool { x < UNUSED } -#[inline(always)] +#[rustc_early_inline] const fn is_reading(x: BorrowCounter) -> bool { x > UNUSED } @@ -2112,7 +2112,7 @@ impl UnsafeCell { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_unsafe_cell_new", since = "1.32.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn new(value: T) -> UnsafeCell { UnsafeCell { value } } @@ -2128,7 +2128,7 @@ impl UnsafeCell { /// /// let five = uc.into_inner(); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_cell_into_inner", since = "1.83.0")] #[rustc_allow_const_fn_unstable(const_precise_live_drops)] @@ -2180,7 +2180,7 @@ impl UnsafeCell { /// *uc.get_mut() -= 1; /// assert_eq!(*uc.get_mut(), 41); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "unsafe_cell_from_mut", since = "1.84.0")] #[rustc_const_stable(feature = "unsafe_cell_from_mut", since = "1.84.0")] pub const fn from_mut(value: &mut T) -> &mut UnsafeCell { @@ -2203,7 +2203,7 @@ impl UnsafeCell { /// /// let five = uc.get(); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_unsafecell_get", since = "1.32.0")] #[rustc_as_ptr] @@ -2230,7 +2230,7 @@ impl UnsafeCell { /// /// assert_eq!(*c.get_mut(), 6); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "unsafe_cell_get_mut", since = "1.50.0")] #[rustc_const_stable(feature = "const_unsafecell_get_mut", since = "1.83.0")] pub const fn get_mut(&mut self) -> &mut T { @@ -2264,7 +2264,7 @@ impl UnsafeCell { /// /// assert_eq!(uc.into_inner(), 5); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "unsafe_cell_raw_get", since = "1.56.0")] #[rustc_const_stable(feature = 
"unsafe_cell_raw_get", since = "1.56.0")] #[rustc_diagnostic_item = "unsafe_cell_raw_get"] diff --git a/library/core/src/convert/mod.rs b/library/core/src/convert/mod.rs index 220a24caf09ee..6720ed1f7bbe4 100644 --- a/library/core/src/convert/mod.rs +++ b/library/core/src/convert/mod.rs @@ -100,7 +100,7 @@ pub use num::FloatToInt; /// ``` #[stable(feature = "convert_id", since = "1.33.0")] #[rustc_const_stable(feature = "const_identity", since = "1.33.0")] -#[inline(always)] +#[rustc_early_inline] #[rustc_diagnostic_item = "convert_identity"] pub const fn identity(x: T) -> T { x diff --git a/library/core/src/hint.rs b/library/core/src/hint.rs index c72eeb9a9c976..9fabf6022aeec 100644 --- a/library/core/src/hint.rs +++ b/library/core/src/hint.rs @@ -194,7 +194,7 @@ pub const unsafe fn unreachable_unchecked() -> ! { /// pointer already has the builtin assumption that it is nonnull. However, it illustrates the /// kind of changes the optimizer can make even when the behavior is less obviously related. 
#[track_caller] -#[inline(always)] +#[rustc_early_inline] #[doc(alias = "assume")] #[stable(feature = "hint_assert_unchecked", since = "1.81.0")] #[rustc_const_stable(feature = "hint_assert_unchecked", since = "1.81.0")] @@ -264,7 +264,7 @@ pub const unsafe fn assert_unchecked(cond: bool) { /// ``` /// /// [`thread::yield_now`]: ../../std/thread/fn.yield_now.html -#[inline(always)] +#[rustc_early_inline] #[stable(feature = "renamed_spin_loop", since = "1.49.0")] pub fn spin_loop() { #[cfg(target_arch = "x86")] @@ -600,7 +600,7 @@ pub const fn black_box(dummy: T) -> T { /// ``` #[unstable(feature = "hint_must_use", issue = "94745")] #[must_use] // <-- :) -#[inline(always)] +#[rustc_early_inline] pub const fn must_use(value: T) -> T { value } @@ -650,7 +650,7 @@ pub const fn must_use(value: T) -> T { /// } /// ``` #[unstable(feature = "likely_unlikely", issue = "136873")] -#[inline(always)] +#[rustc_early_inline] pub const fn likely(b: bool) -> bool { crate::intrinsics::likely(b) } @@ -700,7 +700,7 @@ pub const fn likely(b: bool) -> bool { /// } /// ``` #[unstable(feature = "likely_unlikely", issue = "136873")] -#[inline(always)] +#[rustc_early_inline] pub const fn unlikely(b: bool) -> bool { crate::intrinsics::unlikely(b) } @@ -733,7 +733,7 @@ pub const fn unlikely(b: bool) -> bool { /// } /// ``` #[unstable(feature = "cold_path", issue = "136873")] -#[inline(always)] +#[rustc_early_inline] pub const fn cold_path() { crate::intrinsics::cold_path() } @@ -779,7 +779,7 @@ pub const fn cold_path() { /// # append(&hasher, 42, &mut bucket_one, &mut bucket_two); /// # assert_eq!(bucket_one.len() + bucket_two.len(), 1); /// ``` -#[inline(always)] +#[rustc_early_inline] #[stable(feature = "select_unpredictable", since = "1.88.0")] pub fn select_unpredictable(condition: bool, true_val: T, false_val: T) -> T { // FIXME(https://github.com/rust-lang/unsafe-code-guidelines/issues/245): diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 
106cc725fee2c..4714f6d61f222 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -416,7 +416,7 @@ pub const fn cold_path() {} /// This intrinsic does not have a stable counterpart. #[unstable(feature = "core_intrinsics", issue = "none")] #[rustc_nounwind] -#[inline(always)] +#[rustc_early_inline] pub const fn likely(b: bool) -> bool { if b { true @@ -439,7 +439,7 @@ pub const fn likely(b: bool) -> bool { /// This intrinsic does not have a stable counterpart. #[unstable(feature = "core_intrinsics", issue = "none")] #[rustc_nounwind] -#[inline(always)] +#[rustc_early_inline] pub const fn unlikely(b: bool) -> bool { if b { cold_path(); diff --git a/library/core/src/mem/manually_drop.rs b/library/core/src/mem/manually_drop.rs index 02bb81792931e..b56a0febac311 100644 --- a/library/core/src/mem/manually_drop.rs +++ b/library/core/src/mem/manually_drop.rs @@ -176,7 +176,7 @@ impl ManuallyDrop { #[must_use = "if you don't need the wrapper, you can use `mem::forget` instead"] #[stable(feature = "manually_drop", since = "1.20.0")] #[rustc_const_stable(feature = "const_manually_drop", since = "1.32.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn new(value: T) -> ManuallyDrop { ManuallyDrop { value } } @@ -194,7 +194,7 @@ impl ManuallyDrop { /// ``` #[stable(feature = "manually_drop", since = "1.20.0")] #[rustc_const_stable(feature = "const_manually_drop", since = "1.32.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn into_inner(slot: ManuallyDrop) -> T { slot.value } diff --git a/library/core/src/mem/maybe_uninit.rs b/library/core/src/mem/maybe_uninit.rs index c160360cfacf9..b68860c39af86 100644 --- a/library/core/src/mem/maybe_uninit.rs +++ b/library/core/src/mem/maybe_uninit.rs @@ -303,7 +303,7 @@ impl MaybeUninit { #[stable(feature = "maybe_uninit", since = "1.36.0")] #[rustc_const_stable(feature = "const_maybe_uninit", since = "1.36.0")] #[must_use = "use `forget` to avoid running Drop code"] - 
#[inline(always)] + #[rustc_early_inline] pub const fn new(val: T) -> MaybeUninit { MaybeUninit { value: ManuallyDrop::new(val) } } @@ -325,7 +325,7 @@ impl MaybeUninit { #[stable(feature = "maybe_uninit", since = "1.36.0")] #[rustc_const_stable(feature = "const_maybe_uninit", since = "1.36.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] #[rustc_diagnostic_item = "maybe_uninit_uninit"] pub const fn uninit() -> MaybeUninit { MaybeUninit { uninit: () } @@ -468,7 +468,7 @@ impl MaybeUninit { /// } /// } /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "maybe_uninit_write", since = "1.55.0")] #[rustc_const_stable(feature = "const_maybe_uninit_write", since = "1.85.0")] pub const fn write(&mut self, val: T) -> &mut T { @@ -513,7 +513,7 @@ impl MaybeUninit { #[stable(feature = "maybe_uninit", since = "1.36.0")] #[rustc_const_stable(feature = "const_maybe_uninit_as_ptr", since = "1.59.0")] #[rustc_as_ptr] - #[inline(always)] + #[rustc_early_inline] pub const fn as_ptr(&self) -> *const T { // `MaybeUninit` and `ManuallyDrop` are both `repr(transparent)` so we can cast the pointer. self as *const _ as *const T @@ -555,7 +555,7 @@ impl MaybeUninit { #[stable(feature = "maybe_uninit", since = "1.36.0")] #[rustc_const_stable(feature = "const_maybe_uninit_as_mut_ptr", since = "1.83.0")] #[rustc_as_ptr] - #[inline(always)] + #[rustc_early_inline] pub const fn as_mut_ptr(&mut self) -> *mut T { // `MaybeUninit` and `ManuallyDrop` are both `repr(transparent)` so we can cast the pointer. 
self as *mut _ as *mut T @@ -608,7 +608,7 @@ impl MaybeUninit { /// ``` #[stable(feature = "maybe_uninit", since = "1.36.0")] #[rustc_const_stable(feature = "const_maybe_uninit_assume_init_by_value", since = "1.59.0")] - #[inline(always)] + #[rustc_early_inline] #[rustc_diagnostic_item = "assume_init"] #[track_caller] pub const unsafe fn assume_init(self) -> T { @@ -681,7 +681,7 @@ impl MaybeUninit { /// ``` #[stable(feature = "maybe_uninit_extra", since = "1.60.0")] #[rustc_const_stable(feature = "const_maybe_uninit_assume_init_read", since = "1.75.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn assume_init_read(&self) -> T { // SAFETY: the caller must guarantee that `self` is initialized. @@ -778,7 +778,7 @@ impl MaybeUninit { /// ``` #[stable(feature = "maybe_uninit_ref", since = "1.55.0")] #[rustc_const_stable(feature = "const_maybe_uninit_assume_init_ref", since = "1.59.0")] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn assume_init_ref(&self) -> &T { // SAFETY: the caller must guarantee that `self` is initialized. // This also means that `self` must be a `value` variant. @@ -895,7 +895,7 @@ impl MaybeUninit { /// ``` #[stable(feature = "maybe_uninit_ref", since = "1.55.0")] #[rustc_const_stable(feature = "const_maybe_uninit_assume_init", since = "1.84.0")] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn assume_init_mut(&mut self) -> &mut T { // SAFETY: the caller must guarantee that `self` is initialized. // This also means that `self` must be a `value` variant. @@ -931,7 +931,7 @@ impl MaybeUninit { /// assert_eq!(array, [0, 1, 2]); /// ``` #[unstable(feature = "maybe_uninit_array_assume_init", issue = "96097")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn array_assume_init(array: [Self; N]) -> [T; N] { // SAFETY: @@ -1006,14 +1006,14 @@ impl MaybeUninit { /// Gets a pointer to the first element of the array. 
#[unstable(feature = "maybe_uninit_slice", issue = "63569")] - #[inline(always)] + #[rustc_early_inline] pub const fn slice_as_ptr(this: &[MaybeUninit]) -> *const T { this.as_ptr() as *const T } /// Gets a mutable pointer to the first element of the array. #[unstable(feature = "maybe_uninit_slice", issue = "63569")] - #[inline(always)] + #[rustc_early_inline] pub const fn slice_as_mut_ptr(this: &mut [MaybeUninit]) -> *mut T { this.as_mut_ptr() as *mut T } @@ -1389,7 +1389,7 @@ impl [MaybeUninit] { /// non-null. Dropping such a `Vec` however will cause undefined /// behaviour. #[unstable(feature = "maybe_uninit_slice", issue = "63569")] - #[inline(always)] + #[rustc_early_inline] pub unsafe fn assume_init_drop(&mut self) { if !self.is_empty() { // SAFETY: the caller must guarantee that every element of `self` @@ -1407,7 +1407,7 @@ impl [MaybeUninit] { /// behavior: it is up to the caller to guarantee that every `MaybeUninit` in /// the slice really is in an initialized state. #[unstable(feature = "maybe_uninit_slice", issue = "63569")] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn assume_init_ref(&self) -> &[T] { // SAFETY: casting `slice` to a `*const [T]` is safe since the caller guarantees that // `slice` is initialized, and `MaybeUninit` is guaranteed to have the same layout as `T`. @@ -1425,7 +1425,7 @@ impl [MaybeUninit] { /// slice really is in an initialized state. For instance, `.assume_init_mut()` cannot /// be used to initialize a `MaybeUninit` slice. #[unstable(feature = "maybe_uninit_slice", issue = "63569")] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn assume_init_mut(&mut self) -> &mut [T] { // SAFETY: similar to safety notes for `slice_get_ref`, but we have a // mutable reference which is also guaranteed to be valid for writes. 
diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index db4c8e9e55150..1a43bb1760248 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -325,7 +325,7 @@ pub fn forget_unsized(t: T) { /// [`Box`]: ../../std/boxed/struct.Box.html /// [`Option<&T>`]: crate::option::Option /// -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] @@ -480,7 +480,7 @@ pub fn min_align_of_val(val: &T) -> usize { /// ``` /// assert_eq!(4, align_of::()); /// ``` -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] @@ -662,7 +662,7 @@ pub const fn needs_drop() -> bool { /// let _x: &i32 = unsafe { mem::zeroed() }; // Undefined behavior! /// let _y: fn() = unsafe { mem::zeroed() }; // And again! /// ``` -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "mem_zeroed"] @@ -699,7 +699,7 @@ pub const unsafe fn zeroed() -> T { /// [uninit]: MaybeUninit::uninit /// [assume_init]: MaybeUninit::assume_init /// [inv]: MaybeUninit#initialization-invariant -#[inline(always)] +#[rustc_early_inline] #[must_use] #[deprecated(since = "1.39.0", note = "use `mem::MaybeUninit` instead")] #[stable(feature = "rust1", since = "1.0.0")] @@ -1213,7 +1213,7 @@ pub const fn discriminant(v: &T) -> Discriminant { /// assert_eq!(mem::variant_count::>(), 2); /// assert_eq!(mem::variant_count::>(), 2); /// ``` -#[inline(always)] +#[rustc_early_inline] #[must_use] #[unstable(feature = "variant_count", issue = "73662")] #[rustc_const_unstable(feature = "variant_count", issue = "73662")] diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs index 5dc9d97de4b3f..8cd12a8acf492 100644 --- a/library/core/src/num/int_macros.rs +++ b/library/core/src/num/int_macros.rs @@ -72,7 +72,7 @@ macro_rules! 
int_impl { #[doc(alias = "popcnt")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn count_ones(self) -> u32 { (self as $UnsignedT).count_ones() } /// Returns the number of zeros in the binary representation of `self`. @@ -86,7 +86,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn count_zeros(self) -> u32 { (!self).count_ones() } @@ -108,7 +108,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn leading_zeros(self) -> u32 { (self as $UnsignedT).leading_zeros() } @@ -126,7 +126,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn trailing_zeros(self) -> u32 { (self as $UnsignedT).trailing_zeros() } @@ -144,7 +144,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "leading_trailing_ones", since = "1.46.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn leading_ones(self) -> u32 { (self as $UnsignedT).leading_ones() } @@ -162,7 +162,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "leading_trailing_ones", since = "1.46.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn trailing_ones(self) -> u32 { (self as $UnsignedT).trailing_ones() } @@ -183,7 +183,7 @@ macro_rules! 
int_impl { #[unstable(feature = "isolate_most_least_significant_one", issue = "136909")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn isolate_most_significant_one(self) -> Self { self & (((1 as $SelfT) << (<$SelfT>::BITS - 1)).wrapping_shr(self.leading_zeros())) } @@ -204,7 +204,7 @@ macro_rules! int_impl { #[unstable(feature = "isolate_most_least_significant_one", issue = "136909")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn isolate_least_significant_one(self) -> Self { self & self.wrapping_neg() } @@ -247,7 +247,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn rotate_left(self, n: u32) -> Self { (self as $UnsignedT).rotate_left(n) as Self } @@ -270,7 +270,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn rotate_right(self, n: u32) -> Self { (self as $UnsignedT).rotate_right(n) as Self } @@ -290,7 +290,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn swap_bytes(self) -> Self { (self as $UnsignedT).swap_bytes() as Self } @@ -311,7 +311,7 @@ macro_rules! 
int_impl { #[rustc_const_stable(feature = "reverse_bits", since = "1.37.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn reverse_bits(self) -> Self { (self as $UnsignedT).reverse_bits() as Self } @@ -511,7 +511,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "unchecked_math", since = "1.79.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { assert_unsafe_precondition!( @@ -653,7 +653,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "unchecked_math", since = "1.79.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { assert_unsafe_precondition!( @@ -795,7 +795,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "unchecked_math", since = "1.79.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self { assert_unsafe_precondition!( @@ -1221,7 +1221,7 @@ macro_rules! int_impl { )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_neg(self) -> Self { assert_unsafe_precondition!( @@ -1343,7 +1343,7 @@ macro_rules! int_impl { )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self { assert_unsafe_precondition!( @@ -1459,7 +1459,7 @@ macro_rules! 
int_impl { )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self { assert_unsafe_precondition!( @@ -1710,7 +1710,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn saturating_add(self, rhs: Self) -> Self { intrinsics::saturating_add(self, rhs) } @@ -1752,7 +1752,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn saturating_sub(self, rhs: Self) -> Self { intrinsics::saturating_sub(self, rhs) } @@ -1796,7 +1796,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn saturating_neg(self) -> Self { intrinsics::saturating_sub(0, self) } @@ -1933,7 +1933,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "mixed_integer_ops", since = "1.66.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_add_unsigned(self, rhs: $UnsignedT) -> Self { self.wrapping_add(rhs as Self) } @@ -1951,7 +1951,7 @@ macro_rules! 
int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_sub(self, rhs: Self) -> Self { intrinsics::wrapping_sub(self, rhs) } @@ -1969,7 +1969,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "mixed_integer_ops", since = "1.66.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_sub_unsigned(self, rhs: $UnsignedT) -> Self { self.wrapping_sub(rhs as Self) } @@ -1987,7 +1987,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_mul(self, rhs: Self) -> Self { intrinsics::wrapping_mul(self, rhs) } @@ -2113,7 +2113,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_neg(self) -> Self { (0 as $SelfT).wrapping_sub(self) } @@ -2136,7 +2136,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_shl(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds @@ -2163,7 +2163,7 @@ macro_rules! 
int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_shr(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds @@ -2291,7 +2291,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) { let (a, b) = intrinsics::add_with_overflow(self as $ActualT, rhs as $ActualT); (a as Self, b) @@ -2393,7 +2393,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) { let (a, b) = intrinsics::sub_with_overflow(self as $ActualT, rhs as $ActualT); (a as Self, b) @@ -2496,7 +2496,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) { let (a, b) = intrinsics::mul_with_overflow(self as $ActualT, rhs as $ActualT); (a as Self, b) @@ -3463,7 +3463,7 @@ macro_rules! int_impl { #[rustc_const_stable(feature = "const_int_sign", since = "1.47.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn signum(self) -> Self { // Picking the right way to phrase this is complicated // () @@ -3485,7 +3485,7 @@ macro_rules! 
int_impl { #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn is_positive(self) -> bool { self > 0 } /// Returns `true` if `self` is negative and `false` if the number is zero or @@ -3500,7 +3500,7 @@ macro_rules! int_impl { #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn is_negative(self) -> bool { self < 0 } /// Returns the memory representation of this integer as a byte array in @@ -3689,7 +3689,7 @@ macro_rules! int_impl { /// /// Returns the smallest value that can be represented by this integer type. #[stable(feature = "rust1", since = "1.0.0")] - #[inline(always)] + #[rustc_early_inline] #[rustc_promotable] #[rustc_const_stable(feature = "const_min_value", since = "1.32.0")] #[deprecated(since = "TBD", note = "replaced by the `MIN` associated constant on this type")] @@ -3703,7 +3703,7 @@ macro_rules! int_impl { /// /// Returns the largest value that can be represented by this integer type. #[stable(feature = "rust1", since = "1.0.0")] - #[inline(always)] + #[rustc_early_inline] #[rustc_promotable] #[rustc_const_stable(feature = "const_max_value", since = "1.32.0")] #[deprecated(since = "TBD", note = "replaced by the `MAX` associated constant on this type")] diff --git a/library/core/src/num/mod.rs b/library/core/src/num/mod.rs index acfe38b7a37b5..ab9c71507876d 100644 --- a/library/core/src/num/mod.rs +++ b/library/core/src/num/mod.rs @@ -1357,7 +1357,7 @@ pub enum FpCategory { /// Note that if the radix is known to the compiler, it is just the check of digits.len that /// is done at runtime. 
#[doc(hidden)] -#[inline(always)] +#[rustc_early_inline] #[unstable(issue = "none", feature = "std_internals")] pub const fn can_not_overflow(radix: u32, is_signed_ty: bool, digits: &[u8]) -> bool { radix <= 16 && digits.len() <= size_of::() * 2 - is_signed_ty as usize diff --git a/library/core/src/num/nonzero.rs b/library/core/src/num/nonzero.rs index f793602de5087..a8e44d7ec3143 100644 --- a/library/core/src/num/nonzero.rs +++ b/library/core/src/num/nonzero.rs @@ -636,7 +636,7 @@ macro_rules! nonzero_integer { #[unstable(feature = "isolate_most_least_significant_one", issue = "136909")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn isolate_most_significant_one(self) -> Self { let n = self.get() & (((1 as $Int) << (<$Int>::BITS - 1)).wrapping_shr(self.leading_zeros())); @@ -666,7 +666,7 @@ macro_rules! nonzero_integer { #[unstable(feature = "isolate_most_least_significant_one", issue = "136909")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn isolate_least_significant_one(self) -> Self { let n = self.get(); let n = n & n.wrapping_neg(); @@ -700,7 +700,7 @@ macro_rules! nonzero_integer { #[doc(alias = "popcnt")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn count_ones(self) -> NonZero { // SAFETY: // `self` is non-zero, which means it has at least one bit set, which means @@ -731,7 +731,7 @@ macro_rules! nonzero_integer { #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn rotate_left(self, n: u32) -> Self { let result = self.get().rotate_left(n); // SAFETY: Rotating bits preserves the property int > 0. 
@@ -762,7 +762,7 @@ macro_rules! nonzero_integer { #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn rotate_right(self, n: u32) -> Self { let result = self.get().rotate_right(n); // SAFETY: Rotating bits preserves the property int > 0. @@ -789,7 +789,7 @@ macro_rules! nonzero_integer { #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn swap_bytes(self) -> Self { let result = self.get().swap_bytes(); // SAFETY: Shuffling bytes preserves the property int > 0. @@ -817,7 +817,7 @@ macro_rules! nonzero_integer { #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn reverse_bits(self) -> Self { let result = self.get().reverse_bits(); // SAFETY: Reversing bits preserves the property int > 0. @@ -850,7 +850,7 @@ macro_rules! nonzero_integer { /// ``` #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const fn from_be(x: Self) -> Self { let result = $Int::from_be(x.get()); // SAFETY: Shuffling bytes preserves the property int > 0. @@ -883,7 +883,7 @@ macro_rules! nonzero_integer { /// ``` #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const fn from_le(x: Self) -> Self { let result = $Int::from_le(x.get()); // SAFETY: Shuffling bytes preserves the property int > 0. @@ -916,7 +916,7 @@ macro_rules! 
nonzero_integer { #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn to_be(self) -> Self { let result = self.get().to_be(); // SAFETY: Shuffling bytes preserves the property int > 0. @@ -949,7 +949,7 @@ macro_rules! nonzero_integer { #[unstable(feature = "nonzero_bitwise", issue = "128281")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn to_le(self) -> Self { let result = self.get().to_le(); // SAFETY: Shuffling bytes preserves the property int > 0. @@ -1693,7 +1693,7 @@ macro_rules! nonzero_integer_signedness_dependent_methods { #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast_signed(self) -> NonZero<$Sint> { // SAFETY: `self.get()` can't be zero unsafe { NonZero::new_unchecked(self.get().cast_signed()) } @@ -2130,7 +2130,7 @@ macro_rules! nonzero_integer_signedness_dependent_methods { #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast_unsigned(self) -> NonZero<$Uint> { // SAFETY: `self.get()` can't be zero unsafe { NonZero::new_unchecked(self.get().cast_unsigned()) } diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs index a70d64e6bb401..d381b33033cb3 100644 --- a/library/core/src/num/uint_macros.rs +++ b/library/core/src/num/uint_macros.rs @@ -74,7 +74,7 @@ macro_rules! 
uint_impl { #[doc(alias = "popcnt")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn count_ones(self) -> u32 { return intrinsics::ctpop(self); } @@ -94,7 +94,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn count_zeros(self) -> u32 { (!self).count_ones() } @@ -121,7 +121,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn leading_zeros(self) -> u32 { return intrinsics::ctlz(self as $ActualT); } @@ -145,7 +145,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn trailing_zeros(self) -> u32 { return intrinsics::cttz(self); } @@ -168,7 +168,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "leading_trailing_ones", since = "1.46.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn leading_ones(self) -> u32 { (!self).leading_zeros() } @@ -192,7 +192,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "leading_trailing_ones", since = "1.46.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn trailing_ones(self) -> u32 { (!self).trailing_zeros() } @@ -214,7 +214,7 @@ macro_rules! 
uint_impl { #[unstable(feature = "uint_bit_width", issue = "142326")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn bit_width(self) -> u32 { Self::BITS - self.leading_zeros() } @@ -235,7 +235,7 @@ macro_rules! uint_impl { #[unstable(feature = "isolate_most_least_significant_one", issue = "136909")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn isolate_most_significant_one(self) -> Self { self & (((1 as $SelfT) << (<$SelfT>::BITS - 1)).wrapping_shr(self.leading_zeros())) } @@ -256,7 +256,7 @@ macro_rules! uint_impl { #[unstable(feature = "isolate_most_least_significant_one", issue = "136909")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn isolate_least_significant_one(self) -> Self { self & self.wrapping_neg() } @@ -299,7 +299,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn rotate_left(self, n: u32) -> Self { return intrinsics::rotate_left(self, n); } @@ -322,7 +322,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn rotate_right(self, n: u32) -> Self { return intrinsics::rotate_right(self, n); } @@ -341,7 +341,7 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn swap_bytes(self) -> Self { intrinsics::bswap(self as $ActualT) as Self } @@ -362,7 +362,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "reverse_bits", since = "1.37.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn reverse_bits(self) -> Self { intrinsics::bitreverse(self as $ActualT) as Self } @@ -386,7 +386,7 @@ macro_rules! uint_impl { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const fn from_be(x: Self) -> Self { #[cfg(target_endian = "big")] { @@ -417,7 +417,7 @@ macro_rules! uint_impl { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const fn from_le(x: Self) -> Self { #[cfg(target_endian = "little")] { @@ -449,7 +449,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn to_be(self) -> Self { // or not to be? #[cfg(target_endian = "big")] { @@ -481,7 +481,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn to_le(self) -> Self { #[cfg(target_endian = "little")] { @@ -580,7 +580,7 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "unchecked_math", since = "1.79.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { assert_unsafe_precondition!( @@ -762,7 +762,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "unchecked_math", since = "1.79.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { assert_unsafe_precondition!( @@ -974,7 +974,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "unchecked_math", since = "1.79.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self { assert_unsafe_precondition!( @@ -1042,7 +1042,7 @@ macro_rules! uint_impl { #[unstable(feature = "strict_overflow_ops", issue = "118260")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn strict_div(self, rhs: Self) -> Self { self / rhs @@ -1097,7 +1097,7 @@ macro_rules! uint_impl { #[unstable(feature = "strict_overflow_ops", issue = "118260")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn strict_div_euclid(self, rhs: Self) -> Self { self / rhs @@ -1252,7 +1252,7 @@ macro_rules! 
uint_impl { #[unstable(feature = "strict_overflow_ops", issue = "118260")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn strict_rem(self, rhs: Self) -> Self { self % rhs @@ -1309,7 +1309,7 @@ macro_rules! uint_impl { #[unstable(feature = "strict_overflow_ops", issue = "118260")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn strict_rem_euclid(self, rhs: Self) -> Self { self % rhs @@ -1662,7 +1662,7 @@ macro_rules! uint_impl { )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self { assert_unsafe_precondition!( @@ -1778,7 +1778,7 @@ macro_rules! uint_impl { )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self { assert_unsafe_precondition!( @@ -1915,7 +1915,7 @@ macro_rules! uint_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn saturating_add(self, rhs: Self) -> Self { intrinsics::saturating_add(self, rhs) } @@ -1959,7 +1959,7 @@ macro_rules! uint_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn saturating_sub(self, rhs: Self) -> Self { intrinsics::saturating_sub(self, rhs) } @@ -2107,7 +2107,7 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_sub(self, rhs: Self) -> Self { intrinsics::wrapping_sub(self, rhs) } @@ -2147,7 +2147,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_mul(self, rhs: Self) -> Self { intrinsics::wrapping_mul(self, rhs) } @@ -2171,7 +2171,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn wrapping_div(self, rhs: Self) -> Self { self / rhs @@ -2198,7 +2198,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_euclidean_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn wrapping_div_euclid(self, rhs: Self) -> Self { self / rhs @@ -2224,7 +2224,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn wrapping_rem(self, rhs: Self) -> Self { self % rhs @@ -2252,7 +2252,7 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "const_euclidean_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn wrapping_rem_euclid(self, rhs: Self) -> Self { self % rhs @@ -2280,7 +2280,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_neg(self) -> Self { (0 as $SelfT).wrapping_sub(self) } @@ -2306,7 +2306,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_shl(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds @@ -2336,7 +2336,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_shr(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds @@ -2414,7 +2414,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) { let (a, b) = intrinsics::add_with_overflow(self as $ActualT, rhs as $ActualT); (a as Self, b) @@ -2515,7 +2515,7 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) { let (a, b) = intrinsics::sub_with_overflow(self as $ActualT, rhs as $ActualT); (a as Self, b) @@ -2638,7 +2638,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) { let (a, b) = intrinsics::mul_with_overflow(self as $ActualT, rhs as $ActualT); (a as Self, b) @@ -2839,7 +2839,7 @@ macro_rules! uint_impl { /// ``` #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_div(2), (2, false));")] /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "wrapping", since = "1.7.0")] #[rustc_const_stable(feature = "const_overflowing_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ @@ -2868,7 +2868,7 @@ macro_rules! uint_impl { /// ``` #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_div_euclid(2), (2, false));")] /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "euclidean_division", since = "1.38.0")] #[rustc_const_stable(feature = "const_euclidean_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ @@ -2894,7 +2894,7 @@ macro_rules! 
uint_impl { /// ``` #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_rem(2), (1, false));")] /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "wrapping", since = "1.7.0")] #[rustc_const_stable(feature = "const_overflowing_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ @@ -2923,7 +2923,7 @@ macro_rules! uint_impl { /// ``` #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_rem_euclid(2), (1, false));")] /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "euclidean_division", since = "1.38.0")] #[rustc_const_stable(feature = "const_euclidean_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ @@ -2946,7 +2946,7 @@ macro_rules! uint_impl { #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".overflowing_neg(), (0, false));")] #[doc = concat!("assert_eq!(2", stringify!($SelfT), ".overflowing_neg(), (-2i32 as ", stringify!($SelfT), ", true));")] /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "wrapping", since = "1.7.0")] #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ @@ -2974,7 +2974,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_shl(self, rhs: u32) -> (Self, bool) { (self.wrapping_shl(rhs), rhs >= Self::BITS) } @@ -2997,7 +2997,7 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] pub const fn overflowing_shr(self, rhs: u32) -> (Self, bool) { (self.wrapping_shr(rhs), rhs >= Self::BITS) } @@ -3151,7 +3151,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_euclidean_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn div_euclid(self, rhs: Self) -> Self { self / rhs @@ -3178,7 +3178,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_euclidean_int_methods", since = "1.52.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn rem_euclid(self, rhs: Self) -> Self { self % rhs @@ -3201,7 +3201,7 @@ macro_rules! uint_impl { #[unstable(feature = "int_roundings", issue = "88581")] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const fn div_floor(self, rhs: Self) -> Self { self / rhs @@ -3328,7 +3328,7 @@ macro_rules! uint_impl { #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_is_power_of_two", since = "1.32.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn is_power_of_two(self) -> bool { self.count_ones() == 1 } @@ -3607,7 +3607,7 @@ macro_rules! uint_impl { /// Returns the smallest value that can be represented by this integer type. 
#[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] - #[inline(always)] + #[rustc_early_inline] #[rustc_const_stable(feature = "const_max_value", since = "1.32.0")] #[deprecated(since = "TBD", note = "replaced by the `MIN` associated constant on this type")] #[rustc_diagnostic_item = concat!(stringify!($SelfT), "_legacy_fn_min_value")] @@ -3619,7 +3619,7 @@ macro_rules! uint_impl { /// Returns the largest value that can be represented by this integer type. #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] - #[inline(always)] + #[rustc_early_inline] #[rustc_const_stable(feature = "const_max_value", since = "1.32.0")] #[deprecated(since = "TBD", note = "replaced by the `MAX` associated constant on this type")] #[rustc_diagnostic_item = concat!(stringify!($SelfT), "_legacy_fn_max_value")] diff --git a/library/core/src/option.rs b/library/core/src/option.rs index ed070fbd22746..2cdb1270e3300 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -997,7 +997,7 @@ impl Option { /// let x: Option<&str> = None; /// assert_eq!(x.unwrap(), "air"); // fails /// ``` - #[inline(always)] + #[rustc_early_inline] #[track_caller] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "option_unwrap"] diff --git a/library/core/src/pin.rs b/library/core/src/pin.rs index 14bf7ba90150e..cf96bbae39c69 100644 --- a/library/core/src/pin.rs +++ b/library/core/src/pin.rs @@ -1177,7 +1177,7 @@ impl> Pin { /// // pinning guarantees are actually upheld. 
/// let mut pinned: Pin<&mut u8> = Pin::new(&mut val); /// ``` - #[inline(always)] + #[rustc_early_inline] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin", since = "1.33.0")] pub const fn new(pointer: Ptr) -> Pin { @@ -1205,7 +1205,7 @@ impl> Pin { /// let r = Pin::into_inner(pinned); /// assert_eq!(*r, 5); /// ``` - #[inline(always)] + #[rustc_early_inline] #[rustc_allow_const_fn_unstable(const_precise_live_drops)] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin_into_inner", since = "1.39.0")] @@ -1343,7 +1343,7 @@ impl Pin { /// [`mem::swap`]: crate::mem::swap /// [`pin` module docs]: self #[lang = "new_unchecked"] - #[inline(always)] + #[rustc_early_inline] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin", since = "1.33.0")] pub const unsafe fn new_unchecked(pointer: Ptr) -> Pin { @@ -1358,7 +1358,7 @@ impl Pin { /// "Malicious" implementations of `Pointer::Deref` are likewise /// ruled out by the contract of `Pin::new_unchecked`. #[stable(feature = "pin", since = "1.33.0")] - #[inline(always)] + #[rustc_early_inline] pub fn as_ref(&self) -> Pin<&Ptr::Target> { // SAFETY: see documentation on this function unsafe { Pin::new_unchecked(&*self.pointer) } @@ -1402,7 +1402,7 @@ impl Pin { /// } /// ``` #[stable(feature = "pin", since = "1.33.0")] - #[inline(always)] + #[rustc_early_inline] pub fn as_mut(&mut self) -> Pin<&mut Ptr::Target> { // SAFETY: see documentation on this function unsafe { Pin::new_unchecked(&mut *self.pointer) } @@ -1417,7 +1417,7 @@ impl Pin { /// `Pin::new_unchecked`. 
#[stable(feature = "pin_deref_mut", since = "1.84.0")] #[must_use = "`self` will be dropped if the result is not used"] - #[inline(always)] + #[rustc_early_inline] pub fn as_deref_mut(self: Pin<&mut Self>) -> Pin<&mut Ptr::Target> { // SAFETY: What we're asserting here is that going from // @@ -1467,7 +1467,7 @@ impl Pin { /// /// [subtle-details]: self#subtle-details-and-the-drop-guarantee #[stable(feature = "pin", since = "1.33.0")] - #[inline(always)] + #[rustc_early_inline] pub fn set(&mut self, value: Ptr::Target) where Ptr::Target: Sized, @@ -1495,7 +1495,7 @@ impl Pin { /// /// If the underlying data is [`Unpin`], [`Pin::into_inner`] should be used /// instead. - #[inline(always)] + #[rustc_early_inline] #[rustc_allow_const_fn_unstable(const_precise_live_drops)] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin_into_inner", since = "1.39.0")] @@ -1551,7 +1551,7 @@ impl<'a, T: ?Sized> Pin<&'a T> { /// with the same lifetime as the reference it wraps. /// /// ["pinning projections"]: self#projections-and-structural-pinning - #[inline(always)] + #[rustc_early_inline] #[must_use] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin", since = "1.33.0")] @@ -1562,7 +1562,7 @@ impl<'a, T: ?Sized> Pin<&'a T> { impl<'a, T: ?Sized> Pin<&'a mut T> { /// Converts this `Pin<&mut T>` into a `Pin<&T>` with the same lifetime. - #[inline(always)] + #[rustc_early_inline] #[must_use = "`self` will be dropped if the result is not used"] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin", since = "1.33.0")] @@ -1579,7 +1579,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { /// that lives for as long as the borrow of the `Pin`, not the lifetime of /// the `Pin` itself. This method allows turning the `Pin` into a reference /// with the same lifetime as the original `Pin`. 
- #[inline(always)] + #[rustc_early_inline] #[must_use = "`self` will be dropped if the result is not used"] #[stable(feature = "pin", since = "1.33.0")] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] @@ -1600,7 +1600,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { /// /// If the underlying data is `Unpin`, `Pin::get_mut` should be used /// instead. - #[inline(always)] + #[rustc_early_inline] #[must_use = "`self` will be dropped if the result is not used"] #[stable(feature = "pin", since = "1.33.0")] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] diff --git a/library/core/src/pin/unsafe_pinned.rs b/library/core/src/pin/unsafe_pinned.rs index b18b5d7c9ec0d..50a30c0a1518c 100644 --- a/library/core/src/pin/unsafe_pinned.rs +++ b/library/core/src/pin/unsafe_pinned.rs @@ -48,7 +48,7 @@ impl UnsafePinned { /// /// All access to the inner value through `&UnsafePinned` or `&mut UnsafePinned` or /// `Pin<&mut UnsafePinned>` requires `unsafe` code. - #[inline(always)] + #[rustc_early_inline] #[must_use] #[unstable(feature = "unsafe_pinned", issue = "125735")] pub const fn new(value: T) -> Self { @@ -56,7 +56,7 @@ impl UnsafePinned { } /// Unwraps the value, consuming this `UnsafePinned`. - #[inline(always)] + #[rustc_early_inline] #[must_use] #[unstable(feature = "unsafe_pinned", issue = "125735")] #[rustc_allow_const_fn_unstable(const_precise_live_drops)] @@ -67,7 +67,7 @@ impl UnsafePinned { impl UnsafePinned { /// Get read-write access to the contents of a pinned `UnsafePinned`. - #[inline(always)] + #[rustc_early_inline] #[must_use] #[unstable(feature = "unsafe_pinned", issue = "125735")] pub const fn get_mut_pinned(self: Pin<&mut Self>) -> *mut T { @@ -79,7 +79,7 @@ impl UnsafePinned { /// /// You should usually be using `get_mut_pinned` instead to explicitly track the fact that this /// memory is "pinned" due to there being aliases. 
- #[inline(always)] + #[rustc_early_inline] #[must_use] #[unstable(feature = "unsafe_pinned", issue = "125735")] pub const fn get_mut_unchecked(&mut self) -> *mut T { @@ -104,7 +104,7 @@ impl UnsafePinned { /// assert_eq!(ptr.read(), 1); /// } /// ``` - #[inline(always)] + #[rustc_early_inline] #[must_use] #[unstable(feature = "unsafe_pinned", issue = "125735")] pub const fn get(&self) -> *mut T { @@ -117,7 +117,7 @@ impl UnsafePinned { /// avoid the creation of temporary references. /// /// [`get`]: UnsafePinned::get - #[inline(always)] + #[rustc_early_inline] #[must_use] #[unstable(feature = "unsafe_pinned", issue = "125735")] pub const fn raw_get(this: *const Self) -> *const T { @@ -131,7 +131,7 @@ impl UnsafePinned { /// /// [`get_mut_pinned`]: UnsafePinned::get_mut_pinned /// [`get_mut_unchecked`]: UnsafePinned::get_mut_unchecked - #[inline(always)] + #[rustc_early_inline] #[must_use] #[unstable(feature = "unsafe_pinned", issue = "125735")] pub const fn raw_get_mut(this: *mut Self) -> *mut T { diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 2ad520b7ead72..39134b6ccc633 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -44,7 +44,7 @@ impl *const T { #[stable(feature = "ptr_cast", since = "1.38.0")] #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")] #[rustc_diagnostic_item = "const_ptr_cast"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast(self) -> *const U { self as _ } @@ -141,7 +141,7 @@ impl *const T { #[stable(feature = "ptr_const_cast", since = "1.65.0")] #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")] #[rustc_diagnostic_item = "ptr_cast_mut"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast_mut(self) -> *mut T { self as _ } @@ -169,7 +169,7 @@ impl *const T { /// /// This is a [Strict Provenance][crate::ptr#strict-provenance] API. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "strict_provenance", since = "1.84.0")] pub fn addr(self) -> usize { // A pointer-to-integer transmute currently has exactly the right semantics: it returns the @@ -202,7 +202,7 @@ impl *const T { /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API. /// /// [`with_exposed_provenance`]: with_exposed_provenance - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "exposed_provenance", since = "1.84.0")] pub fn expose_provenance(self) -> usize { self.cast::<()>() as usize @@ -396,7 +396,7 @@ impl *const T { #[stable(feature = "rust1", since = "1.0.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn offset(self, count: isize) -> *const T where @@ -447,7 +447,7 @@ impl *const T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] @@ -511,7 +511,7 @@ impl *const T { #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_offset(self, count: isize) -> *const T where T: Sized, @@ -531,7 +531,7 @@ impl *const T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_offset(self, count: isize) -> Self { @@ -571,7 +571,7 @@ impl *const T { /// ``` #[unstable(feature = "ptr_mask", issue = "98290")] #[must_use = "returns a new pointer rather than modifying its argument"] - #[inline(always)] + #[rustc_early_inline] pub fn mask(self, mask: usize) -> *const T { intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self) } @@ -680,7 +680,7 @@ impl *const T { /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -876,7 +876,7 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn add(self, count: usize) -> Self where @@ -926,7 +926,7 @@ impl *const T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] @@ -982,7 +982,7 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn sub(self, count: usize) -> Self where @@ -1038,7 +1038,7 @@ impl *const T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] @@ -1101,7 +1101,7 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_add(self, count: usize) -> Self where T: Sized, @@ -1119,7 +1119,7 @@ impl *const T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_add(self, count: usize) -> Self { @@ -1180,7 +1180,7 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_sub(self, count: usize) -> Self where T: Sized, @@ -1198,7 +1198,7 @@ impl *const T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_sub(self, count: usize) -> Self { @@ -1463,7 +1463,7 @@ impl *const [T] { /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3); /// assert!(!slice.is_empty()); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "slice_ptr_len", since = "1.79.0")] #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")] pub const fn is_empty(self) -> bool { diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index 1a2a5182567b4..78af115518115 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -520,7 +520,7 @@ mod mut_ptr; #[doc(alias = "memcpy")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] -#[inline(always)] +#[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[rustc_diagnostic_item = "ptr_copy_nonoverlapping"] pub const unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: 
usize) { @@ -617,7 +617,7 @@ pub const unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: us #[doc(alias = "memmove")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] -#[inline(always)] +#[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[rustc_diagnostic_item = "ptr_copy"] pub const unsafe fn copy(src: *const T, dst: *mut T, count: usize) { @@ -691,7 +691,7 @@ pub const unsafe fn copy(src: *const T, dst: *mut T, count: usize) { #[doc(alias = "memset")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] -#[inline(always)] +#[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[rustc_diagnostic_item = "ptr_write_bytes"] pub const unsafe fn write_bytes(dst: *mut T, val: u8, count: usize) { @@ -824,7 +824,7 @@ pub unsafe fn drop_in_place(to_drop: *mut T) { /// assert!(p.is_null()); /// assert_eq!(p as usize, 0); // this pointer has the address 0 /// ``` -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] @@ -849,7 +849,7 @@ pub const fn null() -> *const T { /// assert!(p.is_null()); /// assert_eq!(p as usize, 0); // this pointer has the address 0 /// ``` -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] @@ -872,7 +872,7 @@ pub const fn null_mut() -> *mut T { /// exposed provenance. See [`with_exposed_provenance`] for more details on that operation. /// /// This is a [Strict Provenance][crate::ptr#strict-provenance] API. 
-#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "strict_provenance", since = "1.84.0")] #[rustc_const_stable(feature = "strict_provenance", since = "1.84.0")] @@ -889,7 +889,7 @@ pub const fn without_provenance(addr: usize) -> *const T { /// a `T`, which means this must not be used as a "not yet initialized" /// sentinel value. Types that lazily allocate must track initialization by /// some other means. -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "strict_provenance", since = "1.84.0")] #[rustc_const_stable(feature = "strict_provenance", since = "1.84.0")] @@ -910,7 +910,7 @@ pub const fn dangling() -> *const T { /// exposed provenance. See [`with_exposed_provenance_mut`] for more details on that operation. /// /// This is a [Strict Provenance][crate::ptr#strict-provenance] API. -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "strict_provenance", since = "1.84.0")] #[rustc_const_stable(feature = "strict_provenance", since = "1.84.0")] @@ -932,7 +932,7 @@ pub const fn without_provenance_mut(addr: usize) -> *mut T { /// a `T`, which means this must not be used as a "not yet initialized" /// sentinel value. Types that lazily allocate must track initialization by /// some other means. -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "strict_provenance", since = "1.84.0")] #[rustc_const_stable(feature = "strict_provenance", since = "1.84.0")] @@ -972,7 +972,7 @@ pub const fn dangling_mut() -> *mut T { /// /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API. 
#[must_use] -#[inline(always)] +#[rustc_early_inline] #[stable(feature = "exposed_provenance", since = "1.84.0")] #[rustc_const_unstable(feature = "const_exposed_provenance", issue = "144538")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -1013,7 +1013,7 @@ pub const fn with_exposed_provenance(addr: usize) -> *const T { /// /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API. #[must_use] -#[inline(always)] +#[rustc_early_inline] #[stable(feature = "exposed_provenance", since = "1.84.0")] #[rustc_const_unstable(feature = "const_exposed_provenance", issue = "144538")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -1069,7 +1069,7 @@ pub const fn with_exposed_provenance_mut(addr: usize) -> *mut T { /// let p = ptr::from_ref(&x); /// unsafe { p.read() }; /// ``` -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "ptr_from_ref", since = "1.76.0")] #[rustc_const_stable(feature = "ptr_from_ref", since = "1.76.0")] @@ -1120,7 +1120,7 @@ pub const fn from_ref(r: &T) -> *const T { /// let p = ptr::from_mut(&mut x); /// unsafe { p.write(T::default()) }; /// ``` -#[inline(always)] +#[rustc_early_inline] #[must_use] #[stable(feature = "ptr_from_ref", since = "1.76.0")] #[rustc_const_stable(feature = "ptr_from_ref", since = "1.76.0")] @@ -2435,7 +2435,7 @@ pub(crate) unsafe fn align_offset(p: *const T, a: usize) -> usize { /// assert!(!std::ptr::eq(&a[0..2], &a[1..3])); /// ``` #[stable(feature = "ptr_eq", since = "1.17.0")] -#[inline(always)] +#[rustc_early_inline] #[must_use = "pointer comparison produces a value"] #[rustc_diagnostic_item = "ptr_eq"] #[allow(ambiguous_wide_pointer_comparisons)] // it's actually clear here @@ -2461,7 +2461,7 @@ pub fn eq(a: *const T, b: *const T) -> bool { /// assert!(!ptr::eq::(whole, first)); /// ``` #[stable(feature = "ptr_addr_eq", since = "1.76.0")] -#[inline(always)] +#[rustc_early_inline] #[must_use = "pointer 
comparison produces a value"] pub fn addr_eq(p: *const T, q: *const U) -> bool { (p as *const ()) == (q as *const ()) @@ -2514,7 +2514,7 @@ pub fn addr_eq(p: *const T, q: *const U) -> bo /// /// [subtype]: https://doc.rust-lang.org/reference/subtyping.html #[stable(feature = "ptr_fn_addr_eq", since = "1.85.0")] -#[inline(always)] +#[rustc_early_inline] #[must_use = "function pointer comparison produces a value"] pub fn fn_addr_eq(f: T, g: U) -> bool { f.addr() == g.addr() diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 579e2461103d8..cc7659917c00c 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -27,7 +27,7 @@ impl *mut T { #[stable(feature = "ptr_cast", since = "1.38.0")] #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")] #[rustc_diagnostic_item = "ptr_cast"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast(self) -> *mut U { self as _ } @@ -129,7 +129,7 @@ impl *mut T { #[stable(feature = "ptr_const_cast", since = "1.65.0")] #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")] #[rustc_diagnostic_item = "ptr_cast_const"] - #[inline(always)] + #[rustc_early_inline] pub const fn cast_const(self) -> *const T { self as _ } @@ -157,7 +157,7 @@ impl *mut T { /// /// This is a [Strict Provenance][crate::ptr#strict-provenance] API. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "strict_provenance", since = "1.84.0")] pub fn addr(self) -> usize { // A pointer-to-integer transmute currently has exactly the right semantics: it returns the @@ -190,7 +190,7 @@ impl *mut T { /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API. 
/// /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "exposed_provenance", since = "1.84.0")] pub fn expose_provenance(self) -> usize { self.cast::<()>() as usize @@ -394,7 +394,7 @@ impl *mut T { #[stable(feature = "rust1", since = "1.0.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn offset(self, count: isize) -> *mut T where @@ -447,7 +447,7 @@ impl *mut T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] @@ -508,7 +508,7 @@ impl *mut T { #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_offset(self, count: isize) -> *mut T where T: Sized, @@ -528,7 +528,7 @@ impl *mut T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_offset(self, count: isize) -> Self { @@ -571,7 +571,7 @@ impl *mut T { /// ``` #[unstable(feature = "ptr_mask", issue = "98290")] #[must_use = "returns a new pointer rather than modifying its argument"] - #[inline(always)] + #[rustc_early_inline] pub fn mask(self, mask: usize) -> *mut T { intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self) } @@ -835,7 +835,7 @@ impl *mut T { /// ``` #[stable(feature = "ptr_offset_from", since = "1.47.0")] #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")] - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn offset_from(self, origin: *const T) -> isize where @@ -854,7 +854,7 @@ impl *mut T { /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -969,7 +969,7 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn add(self, count: usize) -> Self where @@ -1019,7 +1019,7 @@ impl *mut T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] @@ -1075,7 +1075,7 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn sub(self, count: usize) -> Self where @@ -1131,7 +1131,7 @@ impl *mut T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] @@ -1191,7 +1191,7 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_add(self, count: usize) -> Self where T: Sized, @@ -1209,7 +1209,7 @@ impl *mut T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_add(self, count: usize) -> Self { @@ -1267,7 +1267,7 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[must_use = "returns a new pointer rather than modifying its argument"] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] - #[inline(always)] + #[rustc_early_inline] pub const fn wrapping_sub(self, count: usize) -> Self where T: Sized, @@ -1285,7 +1285,7 @@ impl *mut T { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_sub(self, count: usize) -> Self { @@ -1300,7 +1300,7 @@ impl *mut T { /// [`ptr::read`]: crate::ptr::read() #[stable(feature = "pointer_methods", since = "1.26.0")] #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn read(self) -> T where @@ -1321,7 +1321,7 @@ impl *mut T { /// /// [`ptr::read_volatile`]: crate::ptr::read_volatile() #[stable(feature = "pointer_methods", since = "1.26.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub unsafe fn read_volatile(self) -> T where @@ -1341,7 +1341,7 @@ impl *mut T { /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned() #[stable(feature = "pointer_methods", since = "1.26.0")] #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn read_unaligned(self) -> T where @@ -1361,7 +1361,7 @@ impl *mut T { /// [`ptr::copy`]: crate::ptr::copy() 
#[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[stable(feature = "pointer_methods", since = "1.26.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn copy_to(self, dest: *mut T, count: usize) where @@ -1381,7 +1381,7 @@ impl *mut T { /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[stable(feature = "pointer_methods", since = "1.26.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize) where @@ -1401,7 +1401,7 @@ impl *mut T { /// [`ptr::copy`]: crate::ptr::copy() #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[stable(feature = "pointer_methods", since = "1.26.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn copy_from(self, src: *const T, count: usize) where @@ -1421,7 +1421,7 @@ impl *mut T { /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[stable(feature = "pointer_methods", since = "1.26.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize) where @@ -1437,7 +1437,7 @@ impl *mut T { /// /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place() #[stable(feature = "pointer_methods", since = "1.26.0")] - #[inline(always)] + #[rustc_early_inline] pub unsafe fn drop_in_place(self) { // SAFETY: the caller must uphold the safety contract for `drop_in_place`. 
unsafe { drop_in_place(self) } @@ -1451,7 +1451,7 @@ impl *mut T { /// [`ptr::write`]: crate::ptr::write() #[stable(feature = "pointer_methods", since = "1.26.0")] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn write(self, val: T) where @@ -1470,7 +1470,7 @@ impl *mut T { #[doc(alias = "memset")] #[stable(feature = "pointer_methods", since = "1.26.0")] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn write_bytes(self, val: u8, count: usize) where @@ -1491,7 +1491,7 @@ impl *mut T { /// /// [`ptr::write_volatile`]: crate::ptr::write_volatile() #[stable(feature = "pointer_methods", since = "1.26.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub unsafe fn write_volatile(self, val: T) where @@ -1511,7 +1511,7 @@ impl *mut T { /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned() #[stable(feature = "pointer_methods", since = "1.26.0")] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] - #[inline(always)] + #[rustc_early_inline] #[track_caller] pub const unsafe fn write_unaligned(self, val: T) where @@ -1529,7 +1529,7 @@ impl *mut T { /// [`ptr::replace`]: crate::ptr::replace() #[stable(feature = "pointer_methods", since = "1.26.0")] #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn replace(self, src: T) -> T where T: Sized, @@ -1547,7 +1547,7 @@ impl *mut T { /// [`ptr::swap`]: crate::ptr::swap() #[stable(feature = "pointer_methods", since = "1.26.0")] #[rustc_const_stable(feature = "const_swap", since = "1.85.0")] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn swap(self, with: *mut T) where T: Sized, @@ -1703,7 +1703,7 @@ impl *mut [T] { /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3); /// 
assert_eq!(slice.len(), 3); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "slice_ptr_len", since = "1.79.0")] #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")] pub const fn len(self) -> usize { @@ -1720,7 +1720,7 @@ impl *mut [T] { /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3); /// assert!(!slice.is_empty()); /// ``` - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "slice_ptr_len", since = "1.79.0")] #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")] pub const fn is_empty(self) -> bool { @@ -1782,7 +1782,7 @@ impl *mut [T] { /// assert_eq!(&*right, [3, 0, 5, 6]); /// } /// ``` - #[inline(always)] + #[rustc_early_inline] #[track_caller] #[unstable(feature = "raw_slice_split", issue = "95595")] pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) { @@ -1827,7 +1827,7 @@ impl *mut [T] { /// } /// assert_eq!(v, [1, 2, 3, 4, 5, 6]); /// ``` - #[inline(always)] + #[rustc_early_inline] #[unstable(feature = "raw_slice_split", issue = "95595")] pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) { let len = self.len(); @@ -1854,7 +1854,7 @@ impl *mut [T] { /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3); /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut()); /// ``` - #[inline(always)] + #[rustc_early_inline] #[unstable(feature = "slice_ptr_get", issue = "74265")] pub const fn as_mut_ptr(self) -> *mut T { self as *mut T @@ -1882,7 +1882,7 @@ impl *mut [T] { /// ``` #[unstable(feature = "slice_ptr_get", issue = "74265")] #[rustc_const_unstable(feature = "const_index", issue = "143775")] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn get_unchecked_mut(self, index: I) -> *mut I::Output where I: ~const SliceIndex<[T]>, diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index 62da6567cca75..08e0ca76889dc 100644 --- a/library/core/src/ptr/non_null.rs 
+++ b/library/core/src/ptr/non_null.rs @@ -390,7 +390,7 @@ impl NonNull { #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")] #[rustc_never_returns_null_ptr] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const fn as_ptr(self) -> *mut T { // This is a transmute for the same reasons as `NonZero::get`. @@ -429,7 +429,7 @@ impl NonNull { #[stable(feature = "nonnull", since = "1.25.0")] #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn as_ref<'a>(&self) -> &'a T { // SAFETY: the caller must guarantee that `self` meets all the // requirements for a reference. @@ -467,7 +467,7 @@ impl NonNull { #[stable(feature = "nonnull", since = "1.25.0")] #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T { // SAFETY: the caller must guarantee that `self` meets all the // requirements for a mutable reference. @@ -560,7 +560,7 @@ impl NonNull { /// println!("{}", ptr.offset(2).read()); /// } /// ``` - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[must_use = "returns a new pointer rather than modifying its argument"] #[stable(feature = "non_null_convenience", since = "1.80.0")] @@ -587,7 +587,7 @@ impl NonNull { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. 
#[must_use] - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")] @@ -636,7 +636,7 @@ impl NonNull { /// println!("{}", ptr.add(2).read() as char); /// } /// ``` - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[must_use = "returns a new pointer rather than modifying its argument"] #[stable(feature = "non_null_convenience", since = "1.80.0")] @@ -663,7 +663,7 @@ impl NonNull { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")] @@ -713,7 +713,7 @@ impl NonNull { /// println!("{}", end.sub(2).read() as char); /// } /// ``` - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[must_use = "returns a new pointer rather than modifying its argument"] #[stable(feature = "non_null_convenience", since = "1.80.0")] @@ -745,7 +745,7 @@ impl NonNull { /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. #[must_use] - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")] @@ -864,7 +864,7 @@ impl NonNull { /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. 
- #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")] @@ -956,7 +956,7 @@ impl NonNull { /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "ptr_sub_ptr", since = "1.87.0")] #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")] @@ -1032,7 +1032,7 @@ impl NonNull { /// See [`ptr::copy`] for safety concerns and examples. /// /// [`ptr::copy`]: crate::ptr::copy() - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] @@ -1052,7 +1052,7 @@ impl NonNull { /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples. /// /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] @@ -1072,7 +1072,7 @@ impl NonNull { /// See [`ptr::copy`] for safety concerns and examples. 
/// /// [`ptr::copy`]: crate::ptr::copy() - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] @@ -1092,7 +1092,7 @@ impl NonNull { /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples. /// /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] @@ -1109,7 +1109,7 @@ impl NonNull { /// See [`ptr::drop_in_place`] for safety concerns and examples. /// /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place() - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "non_null_convenience", since = "1.80.0")] pub unsafe fn drop_in_place(self) { // SAFETY: the caller must uphold the safety contract for `drop_in_place`. @@ -1122,7 +1122,7 @@ impl NonNull { /// See [`ptr::write`] for safety concerns and examples. /// /// [`ptr::write`]: crate::ptr::write() - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] @@ -1140,7 +1140,7 @@ impl NonNull { /// See [`ptr::write_bytes`] for safety concerns and examples. 
/// /// [`ptr::write_bytes`]: crate::ptr::write_bytes() - #[inline(always)] + #[rustc_early_inline] #[doc(alias = "memset")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] @@ -1163,7 +1163,7 @@ impl NonNull { /// See [`ptr::write_volatile`] for safety concerns and examples. /// /// [`ptr::write_volatile`]: crate::ptr::write_volatile() - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] pub unsafe fn write_volatile(self, val: T) @@ -1182,7 +1182,7 @@ impl NonNull { /// See [`ptr::write_unaligned`] for safety concerns and examples. /// /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned() - #[inline(always)] + #[rustc_early_inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] @@ -1200,7 +1200,7 @@ impl NonNull { /// See [`ptr::replace`] for safety concerns and examples. /// /// [`ptr::replace`]: crate::ptr::replace() - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")] pub const unsafe fn replace(self, src: T) -> T @@ -1218,7 +1218,7 @@ impl NonNull { /// See [`ptr::swap`] for safety concerns and examples. 
/// /// [`ptr::swap`]: crate::ptr::swap() - #[inline(always)] + #[rustc_early_inline] #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "const_swap", since = "1.85.0")] pub const unsafe fn swap(self, with: NonNull) diff --git a/library/core/src/result.rs b/library/core/src/result.rs index 474f86395ae0e..a9f3bbd26dc15 100644 --- a/library/core/src/result.rs +++ b/library/core/src/result.rs @@ -1211,7 +1211,7 @@ impl Result { /// let x: Result = Err("emergency failure"); /// x.unwrap(); // panics with `emergency failure` /// ``` - #[inline(always)] + #[rustc_early_inline] #[track_caller] #[stable(feature = "rust1", since = "1.0.0")] pub fn unwrap(self) -> T diff --git a/library/core/src/slice/index.rs b/library/core/src/slice/index.rs index 322b3580eded2..37d3d278aaf07 100644 --- a/library/core/src/slice/index.rs +++ b/library/core/src/slice/index.rs @@ -86,7 +86,7 @@ const fn slice_end_index_overflow_fail() -> ! { // Both the safe and unsafe public methods share these helpers, // which use intrinsics directly to get *no* extra checks. -#[inline(always)] +#[rustc_early_inline] const unsafe fn get_offset_len_noubcheck( ptr: *const [T], offset: usize, @@ -98,7 +98,7 @@ const unsafe fn get_offset_len_noubcheck( crate::intrinsics::aggregate_raw_ptr(ptr, len) } -#[inline(always)] +#[rustc_early_inline] const unsafe fn get_offset_len_mut_noubcheck( ptr: *mut [T], offset: usize, diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs index 7c1ed3fe8a246..28bdd3a7b011f 100644 --- a/library/core/src/slice/iter/macros.rs +++ b/library/core/src/slice/iter/macros.rs @@ -84,7 +84,7 @@ macro_rules! iterator { } // Helper function for creating a slice from the iterator. - #[inline(always)] + #[rustc_early_inline] fn make_slice(&self) -> &'a [T] { // SAFETY: the iterator was created from a slice with pointer // `self.ptr` and length `len!(self)`. This guarantees that all @@ -95,7 +95,7 @@ macro_rules! 
iterator { // Helper function for moving the start of the iterator forwards by `offset` elements, // returning the old start. // Unsafe because the offset must not exceed `self.len()`. - #[inline(always)] + #[rustc_early_inline] unsafe fn post_inc_start(&mut self, offset: usize) -> NonNull { let old = self.ptr; @@ -114,7 +114,7 @@ macro_rules! iterator { // Helper function for moving the end of the iterator backwards by `offset` elements, // returning the new end. // Unsafe because the offset must not exceed `self.len()`. - #[inline(always)] + #[rustc_early_inline] unsafe fn pre_dec_end(&mut self, offset: usize) -> NonNull { if_zst!(mut self, // SAFETY: By our precondition, `offset` can be at most the diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index 1dddc48e68e97..e148a4b4eb8da 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -719,7 +719,7 @@ impl [T] { #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")] #[rustc_never_returns_null_ptr] #[rustc_as_ptr] - #[inline(always)] + #[rustc_early_inline] #[must_use] pub const fn as_ptr(&self) -> *const T { self as *const [T] as *const T @@ -750,7 +750,7 @@ impl [T] { #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] #[rustc_never_returns_null_ptr] #[rustc_as_ptr] - #[inline(always)] + #[rustc_early_inline] #[must_use] pub const fn as_mut_ptr(&mut self) -> *mut T { self as *mut [T] as *mut T diff --git a/library/core/src/slice/sort/shared/mod.rs b/library/core/src/slice/sort/shared/mod.rs index e2cdcb3dd511d..db318d3d0a100 100644 --- a/library/core/src/slice/sort/shared/mod.rs +++ b/library/core/src/slice/sort/shared/mod.rs @@ -16,7 +16,7 @@ impl FreezeMarker for T {} /// /// Returns the length of the run, and a bool that is false when the run /// is ascending, and true if the run strictly descending. 
-#[inline(always)] +#[rustc_early_inline] pub(crate) fn find_existing_run bool>( v: &[T], is_less: &mut F, diff --git a/library/core/src/slice/sort/shared/pivot.rs b/library/core/src/slice/sort/shared/pivot.rs index 9eb60f854ce21..e4d50aaa0cf3b 100644 --- a/library/core/src/slice/sort/shared/pivot.rs +++ b/library/core/src/slice/sort/shared/pivot.rs @@ -75,7 +75,7 @@ unsafe fn median3_rec bool>( /// Calculates the median of 3 elements. /// /// SAFETY: a, b, c must be valid initialized elements. -#[inline(always)] +#[rustc_early_inline] fn median3 bool>(a: &T, b: &T, c: &T, is_less: &mut F) -> *const T { // Compiler tends to make this branchless when sensible, and avoids the // third comparison when not. diff --git a/library/core/src/slice/sort/shared/smallsort.rs b/library/core/src/slice/sort/shared/smallsort.rs index 400daba16c1b8..32af10362a06c 100644 --- a/library/core/src/slice/sort/shared/smallsort.rs +++ b/library/core/src/slice/sort/shared/smallsort.rs @@ -679,7 +679,7 @@ unsafe fn sort8_stable bool>( } } -#[inline(always)] +#[rustc_early_inline] unsafe fn merge_up bool>( mut left_src: *const T, mut right_src: *const T, @@ -712,7 +712,7 @@ unsafe fn merge_up bool>( (left_src, right_src, dst) } -#[inline(always)] +#[rustc_early_inline] unsafe fn merge_down bool>( mut left_src: *const T, mut right_src: *const T, diff --git a/library/core/src/slice/sort/stable/drift.rs b/library/core/src/slice/sort/stable/drift.rs index 1edffe095a89d..c2cb62c0d3bf6 100644 --- a/library/core/src/slice/sort/stable/drift.rs +++ b/library/core/src/slice/sort/stable/drift.rs @@ -152,7 +152,7 @@ pub fn sort bool>( // x < 2^63 + 2n // So as long as n < 2^62 we find that x < 2^64, meaning our operations do not // overflow. 
-#[inline(always)] +#[rustc_early_inline] fn merge_tree_scale_factor(n: usize) -> u64 { if usize::BITS > u64::BITS { panic!("Platform not supported"); @@ -163,7 +163,7 @@ fn merge_tree_scale_factor(n: usize) -> u64 { // Note: merge_tree_depth output is < 64 when left < right as f*x and f*y must // differ in some bit, and is <= 64 always. -#[inline(always)] +#[rustc_early_inline] fn merge_tree_depth(left: usize, mid: usize, right: usize, scale_factor: u64) -> u8 { let x = left as u64 + mid as u64; let y = mid as u64 + right as u64; @@ -187,7 +187,7 @@ fn sqrt_approx(n: usize) -> usize { } // Lazy logical runs as in Glidesort. -#[inline(always)] +#[rustc_early_inline] fn logical_merge bool>( v: &mut [T], scratch: &mut [MaybeUninit], @@ -276,22 +276,22 @@ fn stable_quicksort bool>( struct DriftsortRun(usize); impl DriftsortRun { - #[inline(always)] + #[rustc_early_inline] fn new_sorted(length: usize) -> Self { Self((length << 1) | 1) } - #[inline(always)] + #[rustc_early_inline] fn new_unsorted(length: usize) -> Self { Self(length << 1) } - #[inline(always)] + #[rustc_early_inline] fn sorted(self) -> bool { self.0 & 1 == 1 } - #[inline(always)] + #[rustc_early_inline] fn len(self) -> usize { self.0 >> 1 } diff --git a/library/core/src/slice/sort/stable/mod.rs b/library/core/src/slice/sort/stable/mod.rs index 8b4e5c0c8c3a1..d67d27bf40db8 100644 --- a/library/core/src/slice/sort/stable/mod.rs +++ b/library/core/src/slice/sort/stable/mod.rs @@ -25,7 +25,7 @@ pub(crate) mod tiny; /// /// Upholds all safety properties outlined here: /// -#[inline(always)] +#[rustc_early_inline] pub fn sort bool, BufT: BufGuard>(v: &mut [T], is_less: &mut F) { // Arrays of zero-sized types are always all-equal, and thus sorted. 
if T::IS_ZST { diff --git a/library/core/src/slice/sort/stable/tiny.rs b/library/core/src/slice/sort/stable/tiny.rs index 071ab8e107fe3..5e174e42baa88 100644 --- a/library/core/src/slice/sort/stable/tiny.rs +++ b/library/core/src/slice/sort/stable/tiny.rs @@ -6,7 +6,7 @@ use crate::slice::sort::stable::merge; /// Tiny recursive top-down merge sort optimized for binary size. It has no adaptiveness whatsoever, /// no run detection, etc. -#[inline(always)] +#[rustc_early_inline] pub fn mergesort bool>( v: &mut [T], scratch: &mut [MaybeUninit], diff --git a/library/core/src/slice/sort/unstable/heapsort.rs b/library/core/src/slice/sort/unstable/heapsort.rs index 85231779d031f..cdadbf584ef50 100644 --- a/library/core/src/slice/sort/unstable/heapsort.rs +++ b/library/core/src/slice/sort/unstable/heapsort.rs @@ -33,7 +33,7 @@ where // This binary heap respects the invariant `parent >= child`. // // SAFETY: The caller has to guarantee that `node <= v.len()`. -#[inline(always)] +#[rustc_early_inline] unsafe fn sift_down(v: &mut [T], mut node: usize, is_less: &mut F) where F: FnMut(&T, &T) -> bool, diff --git a/library/core/src/slice/sort/unstable/mod.rs b/library/core/src/slice/sort/unstable/mod.rs index d4df8d3a264db..b54b592852bd6 100644 --- a/library/core/src/slice/sort/unstable/mod.rs +++ b/library/core/src/slice/sort/unstable/mod.rs @@ -16,7 +16,7 @@ pub(crate) mod quicksort; /// /// Upholds all safety properties outlined here: /// -#[inline(always)] +#[rustc_early_inline] pub fn sort bool>(v: &mut [T], is_less: &mut F) { // Arrays of zero-sized types are always all-equal, and thus sorted. 
if T::IS_ZST { diff --git a/library/core/src/str/mod.rs b/library/core/src/str/mod.rs index c40af4de7e03d..a5407ab3bb27e 100644 --- a/library/core/src/str/mod.rs +++ b/library/core/src/str/mod.rs @@ -481,7 +481,7 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "str_as_bytes", since = "1.39.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] #[allow(unused_attributes)] pub const fn as_bytes(&self) -> &[u8] { // SAFETY: const sound because we transmute two types with the same layout @@ -527,7 +527,7 @@ impl str { #[stable(feature = "str_mut_extras", since = "1.20.0")] #[rustc_const_stable(feature = "const_str_as_mut", since = "1.83.0")] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const unsafe fn as_bytes_mut(&mut self) -> &mut [u8] { // SAFETY: the cast from `&str` to `&[u8]` is safe since `str` // has the same layout as `&[u8]` (only std can make this guarantee). @@ -558,7 +558,7 @@ impl str { #[rustc_never_returns_null_ptr] #[rustc_as_ptr] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const fn as_ptr(&self) -> *const u8 { self as *const str as *const u8 } @@ -576,7 +576,7 @@ impl str { #[rustc_never_returns_null_ptr] #[rustc_as_ptr] #[must_use] - #[inline(always)] + #[rustc_early_inline] pub const fn as_mut_ptr(&mut self) -> *mut u8 { self as *mut str as *mut u8 } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff index 2c89670dcf7d7..258b26fcb5cfb 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) 
{ - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff index 8fecfe224cc69..11817dbf37cae 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff index 976ea252c2f89..1de75993f7ca4 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined 
without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff index 6c59f5e3e2e86..ee28ff1709282 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff index 1f9cf6d6aca83..cc782bfe1d64d 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git 
a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff index a8760285fac11..cbf60be440610 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff index c398ae70a1a3e..cbae64be50655 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff index 02934c02587d2..f326ea7c734e7 100644 --- 
a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff @@ -24,12 +24,12 @@ let _7: *const [bool; 0]; scope 10 { } - scope 11 (inlined NonZero::::get) { - } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 11 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 12 (inlined without_provenance_mut::<[bool; 0]>) { } } + scope 13 (inlined NonZero::::get) { + } } } scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { diff --git a/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-abort.diff index 178ebad6105ac..0a426a39bc8ef 100644 --- a/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-abort.diff @@ -7,12 +7,15 @@ let _2: (); let mut _4: *mut u8; let mut _5: *mut u8; + let mut _7: usize; scope 1 { debug x => _1; let _3: *mut u8; let _6: u8; scope 2 { debug p => _3; + scope 4 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + } } scope 3 { debug x1 => _6; @@ -28,10 +31,11 @@ StorageLive(_4); StorageLive(_5); _5 = copy _3; - _4 = std::ptr::mut_ptr::::add(move _5, const 1_usize) -> [return: bb1, unwind unreachable]; - } - - bb1: { + StorageLive(_7); + _7 = const 1_usize; +- _4 = Offset(copy _5, copy _7); ++ _4 = Offset(copy _5, const 1_usize); + StorageDead(_7); StorageDead(_5); (*_4) = const 1_u8; StorageDead(_4); diff --git a/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-unwind.diff index ce2545589f183..0a426a39bc8ef 100644 --- 
a/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/sibling_ptr.main.DataflowConstProp.panic-unwind.diff @@ -7,12 +7,15 @@ let _2: (); let mut _4: *mut u8; let mut _5: *mut u8; + let mut _7: usize; scope 1 { debug x => _1; let _3: *mut u8; let _6: u8; scope 2 { debug p => _3; + scope 4 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + } } scope 3 { debug x1 => _6; @@ -28,10 +31,11 @@ StorageLive(_4); StorageLive(_5); _5 = copy _3; - _4 = std::ptr::mut_ptr::::add(move _5, const 1_usize) -> [return: bb1, unwind continue]; - } - - bb1: { + StorageLive(_7); + _7 = const 1_usize; +- _4 = Offset(copy _5, copy _7); ++ _4 = Offset(copy _5, const 1_usize); + StorageDead(_7); StorageDead(_5); (*_4) = const 1_u8; StorageDead(_4); diff --git a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff index 25ffff619e60b..e64b66fc88862 100644 --- a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff +++ b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff @@ -23,11 +23,11 @@ scope 4 { debug _x => _7; } - scope 18 (inlined foo) { + scope 16 (inlined foo) { } } - scope 16 (inlined slice_from_raw_parts::<()>) { - scope 17 (inlined std::ptr::from_raw_parts::<[()], ()>) { + scope 17 (inlined slice_from_raw_parts::<()>) { + scope 18 (inlined std::ptr::from_raw_parts::<[()], ()>) { } } } @@ -45,15 +45,15 @@ let _17: std::ptr::NonNull<[u8]>; scope 8 { scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) { - scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) { - scope 13 (inlined NonNull::<[u8]>::cast::) { + scope 12 (inlined NonNull::::as_ptr) { + } + scope 13 (inlined NonNull::<[u8]>::as_non_null_ptr) { + scope 14 (inlined NonNull::<[u8]>::cast::) { let mut _22: *mut [u8]; - scope 14 (inlined 
NonNull::<[u8]>::as_ptr) { + scope 15 (inlined NonNull::<[u8]>::as_ptr) { } } } - scope 15 (inlined NonNull::::as_ptr) { - } } } scope 10 (inlined ::allocate) { diff --git a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-unwind.diff b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-unwind.diff index b2085afb71379..15ea60c9f9797 100644 --- a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-unwind.diff +++ b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-unwind.diff @@ -10,7 +10,7 @@ let mut _8: *const [()]; let mut _9: std::boxed::Box<()>; let mut _10: *const (); - let mut _11: usize; + let mut _23: usize; scope 1 { debug vp_ctx => _1; let _4: *const (); @@ -23,24 +23,107 @@ scope 4 { debug _x => _7; } - scope 7 (inlined foo) { + scope 16 (inlined foo) { } } - scope 5 (inlined slice_from_raw_parts::<()>) { - scope 6 (inlined std::ptr::from_raw_parts::<[()], ()>) { + scope 17 (inlined slice_from_raw_parts::<()>) { + scope 18 (inlined std::ptr::from_raw_parts::<[()], ()>) { } } } } + scope 5 (inlined Box::<()>::new) { + let mut _11: usize; + let mut _12: usize; + let mut _13: *mut u8; + scope 6 (inlined alloc::alloc::exchange_malloc) { + let _14: std::alloc::Layout; + let mut _15: std::result::Result, std::alloc::AllocError>; + let mut _16: isize; + let mut _18: !; + scope 7 { + let _17: std::ptr::NonNull<[u8]>; + scope 8 { + scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) { + scope 12 (inlined NonNull::::as_ptr) { + } + scope 13 (inlined NonNull::<[u8]>::as_non_null_ptr) { + scope 14 (inlined NonNull::<[u8]>::cast::) { + let mut _22: *mut [u8]; + scope 15 (inlined NonNull::<[u8]>::as_ptr) { + } + } + } + } + } + scope 10 (inlined ::allocate) { + } + } + scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) { + let mut _19: bool; + let _20: (); + let mut _21: std::ptr::Alignment; + } + } + } bb0: { StorageLive(_1); 
StorageLive(_2); StorageLive(_3); - _3 = Box::<()>::new(const ()) -> [return: bb1, unwind continue]; + StorageLive(_11); + StorageLive(_12); + StorageLive(_13); +- _11 = SizeOf(()); +- _12 = AlignOf(()); ++ _11 = const 0_usize; ++ _12 = const 1_usize; + StorageLive(_14); + StorageLive(_16); + StorageLive(_17); + StorageLive(_19); + _19 = const false; +- switchInt(move _19) -> [0: bb8, otherwise: bb7]; ++ switchInt(const false) -> [0: bb8, otherwise: bb7]; } bb1: { + StorageDead(_3); + StorageDead(_1); + return; + } + + bb2 (cleanup): { + resume; + } + + bb3 (cleanup): { + resume; + } + + bb4: { + unreachable; + } + + bb5: { +- _18 = handle_alloc_error(move _14) -> bb3; ++ _18 = handle_alloc_error(const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}) -> bb3; + } + + bb6: { + _17 = copy ((_15 as Ok).0: std::ptr::NonNull<[u8]>); + StorageLive(_22); + _22 = copy _17 as *mut [u8] (Transmute); + _13 = copy _22 as *mut u8 (PtrToPtr); + StorageDead(_22); + StorageDead(_15); + StorageDead(_17); + StorageDead(_16); + StorageDead(_14); + _3 = ShallowInitBox(move _13, ()); + StorageDead(_13); + StorageDead(_12); + StorageDead(_11); _2 = &_3; _1 = copy _2; StorageDead(_2); @@ -54,11 +137,11 @@ StorageLive(_6); - _6 = copy _4; + _6 = copy _10; - StorageLive(_11); - _11 = const 1_usize; -- _5 = *const [()] from (copy _6, copy _11); + StorageLive(_23); + _23 = const 1_usize; +- _5 = *const [()] from (copy _6, copy _23); + _5 = *const [()] from (copy _10, const 1_usize); - StorageDead(_11); + StorageDead(_23); StorageDead(_6); StorageLive(_7); StorageLive(_8); @@ -70,17 +153,34 @@ - StorageDead(_5); + nop; StorageDead(_4); - drop(_3) -> [return: bb2, unwind: bb3]; + drop(_3) -> [return: bb1, unwind: bb2]; } - bb2: { - StorageDead(_3); - StorageDead(_1); - return; + bb7: { +- _20 = Layout::from_size_align_unchecked::precondition_check(copy _11, copy _12) -> [return: bb8, unwind unreachable]; ++ _20 = 
Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb8, unwind unreachable]; } - bb3 (cleanup): { - resume; + bb8: { + StorageDead(_19); + StorageLive(_21); +- _21 = copy _12 as std::ptr::Alignment (Transmute); +- _14 = Layout { size: copy _11, align: move _21 }; ++ _21 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0); ++ _14 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}; + StorageDead(_21); + StorageLive(_15); +- _15 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _14, const false) -> [return: bb9, unwind: bb3]; ++ _15 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb9, unwind: bb3]; + } + + bb9: { + _16 = discriminant(_15); + switchInt(move _16) -> [0: bb6, 1: bb5, otherwise: bb4]; } ++ } ++ ++ ALLOC0 (size: 8, align: 4) { ++ 01 00 00 00 00 00 00 00 │ ........ 
} diff --git a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff index 839b53e3b0b3b..b9753345ace2c 100644 --- a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff +++ b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff @@ -23,11 +23,11 @@ scope 4 { debug _x => _7; } - scope 18 (inlined foo) { + scope 16 (inlined foo) { } } - scope 16 (inlined slice_from_raw_parts::<()>) { - scope 17 (inlined std::ptr::from_raw_parts::<[()], ()>) { + scope 17 (inlined slice_from_raw_parts::<()>) { + scope 18 (inlined std::ptr::from_raw_parts::<[()], ()>) { } } } @@ -45,15 +45,15 @@ let _17: std::ptr::NonNull<[u8]>; scope 8 { scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) { - scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) { - scope 13 (inlined NonNull::<[u8]>::cast::) { + scope 12 (inlined NonNull::::as_ptr) { + } + scope 13 (inlined NonNull::<[u8]>::as_non_null_ptr) { + scope 14 (inlined NonNull::<[u8]>::cast::) { let mut _22: *mut [u8]; - scope 14 (inlined NonNull::<[u8]>::as_ptr) { + scope 15 (inlined NonNull::<[u8]>::as_ptr) { } } } - scope 15 (inlined NonNull::::as_ptr) { - } } } scope 10 (inlined ::allocate) { diff --git a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-unwind.diff b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-unwind.diff index b2085afb71379..d9b24071af031 100644 --- a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-unwind.diff +++ b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-unwind.diff @@ -10,7 +10,7 @@ let mut _8: *const [()]; let mut _9: std::boxed::Box<()>; let mut _10: *const (); - let mut _11: usize; + let mut _23: usize; scope 1 { debug vp_ctx => _1; let _4: *const (); @@ -23,24 +23,107 @@ 
scope 4 { debug _x => _7; } - scope 7 (inlined foo) { + scope 16 (inlined foo) { } } - scope 5 (inlined slice_from_raw_parts::<()>) { - scope 6 (inlined std::ptr::from_raw_parts::<[()], ()>) { + scope 17 (inlined slice_from_raw_parts::<()>) { + scope 18 (inlined std::ptr::from_raw_parts::<[()], ()>) { } } } } + scope 5 (inlined Box::<()>::new) { + let mut _11: usize; + let mut _12: usize; + let mut _13: *mut u8; + scope 6 (inlined alloc::alloc::exchange_malloc) { + let _14: std::alloc::Layout; + let mut _15: std::result::Result, std::alloc::AllocError>; + let mut _16: isize; + let mut _18: !; + scope 7 { + let _17: std::ptr::NonNull<[u8]>; + scope 8 { + scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) { + scope 12 (inlined NonNull::::as_ptr) { + } + scope 13 (inlined NonNull::<[u8]>::as_non_null_ptr) { + scope 14 (inlined NonNull::<[u8]>::cast::) { + let mut _22: *mut [u8]; + scope 15 (inlined NonNull::<[u8]>::as_ptr) { + } + } + } + } + } + scope 10 (inlined ::allocate) { + } + } + scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) { + let mut _19: bool; + let _20: (); + let mut _21: std::ptr::Alignment; + } + } + } bb0: { StorageLive(_1); StorageLive(_2); StorageLive(_3); - _3 = Box::<()>::new(const ()) -> [return: bb1, unwind continue]; + StorageLive(_11); + StorageLive(_12); + StorageLive(_13); +- _11 = SizeOf(()); +- _12 = AlignOf(()); ++ _11 = const 0_usize; ++ _12 = const 1_usize; + StorageLive(_14); + StorageLive(_16); + StorageLive(_17); + StorageLive(_19); + _19 = const false; +- switchInt(move _19) -> [0: bb8, otherwise: bb7]; ++ switchInt(const false) -> [0: bb8, otherwise: bb7]; } bb1: { + StorageDead(_3); + StorageDead(_1); + return; + } + + bb2 (cleanup): { + resume; + } + + bb3 (cleanup): { + resume; + } + + bb4: { + unreachable; + } + + bb5: { +- _18 = handle_alloc_error(move _14) -> bb3; ++ _18 = handle_alloc_error(const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}) -> bb3; 
+ } + + bb6: { + _17 = copy ((_15 as Ok).0: std::ptr::NonNull<[u8]>); + StorageLive(_22); + _22 = copy _17 as *mut [u8] (Transmute); + _13 = copy _22 as *mut u8 (PtrToPtr); + StorageDead(_22); + StorageDead(_15); + StorageDead(_17); + StorageDead(_16); + StorageDead(_14); + _3 = ShallowInitBox(move _13, ()); + StorageDead(_13); + StorageDead(_12); + StorageDead(_11); _2 = &_3; _1 = copy _2; StorageDead(_2); @@ -54,11 +137,11 @@ StorageLive(_6); - _6 = copy _4; + _6 = copy _10; - StorageLive(_11); - _11 = const 1_usize; -- _5 = *const [()] from (copy _6, copy _11); + StorageLive(_23); + _23 = const 1_usize; +- _5 = *const [()] from (copy _6, copy _23); + _5 = *const [()] from (copy _10, const 1_usize); - StorageDead(_11); + StorageDead(_23); StorageDead(_6); StorageLive(_7); StorageLive(_8); @@ -70,17 +153,34 @@ - StorageDead(_5); + nop; StorageDead(_4); - drop(_3) -> [return: bb2, unwind: bb3]; + drop(_3) -> [return: bb1, unwind: bb2]; } - bb2: { - StorageDead(_3); - StorageDead(_1); - return; + bb7: { +- _20 = Layout::from_size_align_unchecked::precondition_check(copy _11, copy _12) -> [return: bb8, unwind unreachable]; ++ _20 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb8, unwind unreachable]; } - bb3 (cleanup): { - resume; + bb8: { + StorageDead(_19); + StorageLive(_21); +- _21 = copy _12 as std::ptr::Alignment (Transmute); +- _14 = Layout { size: copy _11, align: move _21 }; ++ _21 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0); ++ _14 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}; + StorageDead(_21); + StorageLive(_15); +- _15 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _14, const false) -> [return: bb9, unwind: bb3]; ++ _15 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: 
std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb9, unwind: bb3]; + } + + bb9: { + _16 = discriminant(_15); + switchInt(move _16) -> [0: bb6, 1: bb5, otherwise: bb4]; } ++ } ++ ++ ALLOC0 (size: 16, align: 8) { ++ 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 │ ................ } diff --git a/tests/mir-opt/gvn.slice_const_length.GVN.panic-abort.diff b/tests/mir-opt/gvn.slice_const_length.GVN.panic-abort.diff index 1a6204e4ac8ae..4a3a14397b2a3 100644 --- a/tests/mir-opt/gvn.slice_const_length.GVN.panic-abort.diff +++ b/tests/mir-opt/gvn.slice_const_length.GVN.panic-abort.diff @@ -15,16 +15,19 @@ debug len => _4; } } + scope 3 (inlined core::slice::::as_ptr) { + let mut _7: *const [i32]; + } bb0: { - StorageLive(_2); + nop; StorageLive(_3); _3 = &(*_1); - _2 = core::slice::::as_ptr(move _3) -> [return: bb1, unwind unreachable]; - } - - bb1: { + StorageLive(_7); + _7 = &raw const (*_3); + _2 = move _7 as *const i32 (PtrToPtr); + StorageDead(_7); StorageDead(_3); - StorageLive(_4); + nop; diff --git a/tests/mir-opt/gvn.slice_const_length.GVN.panic-unwind.diff b/tests/mir-opt/gvn.slice_const_length.GVN.panic-unwind.diff index 62d57b0fe2831..4a3a14397b2a3 100644 --- a/tests/mir-opt/gvn.slice_const_length.GVN.panic-unwind.diff +++ b/tests/mir-opt/gvn.slice_const_length.GVN.panic-unwind.diff @@ -15,16 +15,19 @@ debug len => _4; } } + scope 3 (inlined core::slice::::as_ptr) { + let mut _7: *const [i32]; + } bb0: { - StorageLive(_2); + nop; StorageLive(_3); _3 = &(*_1); - _2 = core::slice::::as_ptr(move _3) -> [return: bb1, unwind continue]; - } - - bb1: { + StorageLive(_7); + _7 = &raw const (*_3); + _2 = move _7 as *const i32 (PtrToPtr); + StorageDead(_7); StorageDead(_3); - StorageLive(_4); + nop; diff --git a/tests/mir-opt/gvn.slices.GVN.panic-abort.diff b/tests/mir-opt/gvn.slices.GVN.panic-abort.diff index 091c3bd5c7b2a..911319842a91f 100644 --- a/tests/mir-opt/gvn.slices.GVN.panic-abort.diff +++ 
b/tests/mir-opt/gvn.slices.GVN.panic-abort.diff @@ -77,6 +77,18 @@ debug kind => _47; } } + scope 10 (inlined core::str::::as_ptr) { + let mut _57: *const str; + } + scope 11 (inlined core::slice::::as_ptr) { + let mut _58: *const [u8]; + } + } + scope 8 (inlined core::str::::as_ptr) { + let mut _55: *const str; + } + scope 9 (inlined core::str::::as_ptr) { + let mut _56: *const str; } } } @@ -118,10 +130,10 @@ + nop; StorageLive(_11); _11 = &(*_1); - _10 = core::str::::as_ptr(move _11) -> [return: bb3, unwind unreachable]; - } - - bb3: { + StorageLive(_55); + _55 = &raw const (*_11); + _10 = move _55 as *const u8 (PtrToPtr); + StorageDead(_55); StorageDead(_11); _9 = &_10; - StorageLive(_12); @@ -131,10 +143,10 @@ StorageLive(_14); - _14 = &(*_4); + _14 = &(*_1); - _13 = core::str::::as_ptr(move _14) -> [return: bb4, unwind unreachable]; - } - - bb4: { + StorageLive(_56); + _56 = &raw const (*_14); + _13 = move _56 as *const u8 (PtrToPtr); + StorageDead(_56); StorageDead(_14); _12 = &_13; - _8 = (move _9, move _12); @@ -158,10 +170,10 @@ - _17 = Eq(move _18, move _19); + _19 = copy _13; + _17 = Eq(copy _10, copy _13); - switchInt(move _17) -> [0: bb6, otherwise: bb5]; + switchInt(move _17) -> [0: bb4, otherwise: bb3]; } - bb5: { + bb3: { StorageDead(_19); StorageDead(_18); _7 = const (); @@ -183,11 +195,11 @@ StorageLive(_31); StorageLive(_32); _32 = copy _29; -- _31 = opaque::<&[u8]>(move _32) -> [return: bb7, unwind unreachable]; -+ _31 = opaque::<&[u8]>(copy _29) -> [return: bb7, unwind unreachable]; +- _31 = opaque::<&[u8]>(move _32) -> [return: bb5, unwind unreachable]; ++ _31 = opaque::<&[u8]>(copy _29) -> [return: bb5, unwind unreachable]; } - bb6: { + bb4: { StorageDead(_19); StorageDead(_18); - StorageLive(_21); @@ -214,7 +226,7 @@ + _22 = assert_failed::<*const u8, *const u8>(const core::panicking::AssertKind::Eq, move _24, move _26, move _28) -> unwind unreachable; } - bb7: { + bb5: { StorageDead(_32); StorageDead(_31); StorageLive(_33); @@ -225,10 
+237,10 @@ + nop; StorageLive(_37); _37 = &(*_1); - _36 = core::str::::as_ptr(move _37) -> [return: bb8, unwind unreachable]; - } - - bb8: { + StorageLive(_57); + _57 = &raw const (*_37); + _36 = move _57 as *const u8 (PtrToPtr); + StorageDead(_57); StorageDead(_37); _35 = &_36; - StorageLive(_38); @@ -237,10 +249,10 @@ + nop; StorageLive(_40); _40 = &(*_29); - _39 = core::slice::::as_ptr(move _40) -> [return: bb9, unwind unreachable]; - } - - bb9: { + StorageLive(_58); + _58 = &raw const (*_40); + _39 = move _58 as *const u8 (PtrToPtr); + StorageDead(_58); StorageDead(_40); _38 = &_39; - _34 = (move _35, move _38); @@ -264,10 +276,10 @@ - _43 = Eq(move _44, move _45); + _45 = copy _39; + _43 = Eq(copy _36, copy _39); - switchInt(move _43) -> [0: bb11, otherwise: bb10]; + switchInt(move _43) -> [0: bb7, otherwise: bb6]; } - bb10: { + bb6: { StorageDead(_45); StorageDead(_44); _33 = const (); @@ -289,7 +301,7 @@ return; } - bb11: { + bb7: { StorageDead(_45); StorageDead(_44); - StorageLive(_47); diff --git a/tests/mir-opt/gvn.slices.GVN.panic-unwind.diff b/tests/mir-opt/gvn.slices.GVN.panic-unwind.diff index 9768956c9c870..0f59aac426acf 100644 --- a/tests/mir-opt/gvn.slices.GVN.panic-unwind.diff +++ b/tests/mir-opt/gvn.slices.GVN.panic-unwind.diff @@ -77,6 +77,18 @@ debug kind => _47; } } + scope 10 (inlined core::str::::as_ptr) { + let mut _57: *const str; + } + scope 11 (inlined core::slice::::as_ptr) { + let mut _58: *const [u8]; + } + } + scope 8 (inlined core::str::::as_ptr) { + let mut _55: *const str; + } + scope 9 (inlined core::str::::as_ptr) { + let mut _56: *const str; } } } @@ -118,10 +130,10 @@ + nop; StorageLive(_11); _11 = &(*_1); - _10 = core::str::::as_ptr(move _11) -> [return: bb3, unwind continue]; - } - - bb3: { + StorageLive(_55); + _55 = &raw const (*_11); + _10 = move _55 as *const u8 (PtrToPtr); + StorageDead(_55); StorageDead(_11); _9 = &_10; - StorageLive(_12); @@ -131,10 +143,10 @@ StorageLive(_14); - _14 = &(*_4); + _14 = &(*_1); - _13 = 
core::str::::as_ptr(move _14) -> [return: bb4, unwind continue]; - } - - bb4: { + StorageLive(_56); + _56 = &raw const (*_14); + _13 = move _56 as *const u8 (PtrToPtr); + StorageDead(_56); StorageDead(_14); _12 = &_13; - _8 = (move _9, move _12); @@ -158,10 +170,10 @@ - _17 = Eq(move _18, move _19); + _19 = copy _13; + _17 = Eq(copy _10, copy _13); - switchInt(move _17) -> [0: bb6, otherwise: bb5]; + switchInt(move _17) -> [0: bb4, otherwise: bb3]; } - bb5: { + bb3: { StorageDead(_19); StorageDead(_18); _7 = const (); @@ -183,11 +195,11 @@ StorageLive(_31); StorageLive(_32); _32 = copy _29; -- _31 = opaque::<&[u8]>(move _32) -> [return: bb7, unwind continue]; -+ _31 = opaque::<&[u8]>(copy _29) -> [return: bb7, unwind continue]; +- _31 = opaque::<&[u8]>(move _32) -> [return: bb5, unwind continue]; ++ _31 = opaque::<&[u8]>(copy _29) -> [return: bb5, unwind continue]; } - bb6: { + bb4: { StorageDead(_19); StorageDead(_18); - StorageLive(_21); @@ -214,7 +226,7 @@ + _22 = assert_failed::<*const u8, *const u8>(const core::panicking::AssertKind::Eq, move _24, move _26, move _28) -> unwind continue; } - bb7: { + bb5: { StorageDead(_32); StorageDead(_31); StorageLive(_33); @@ -225,10 +237,10 @@ + nop; StorageLive(_37); _37 = &(*_1); - _36 = core::str::::as_ptr(move _37) -> [return: bb8, unwind continue]; - } - - bb8: { + StorageLive(_57); + _57 = &raw const (*_37); + _36 = move _57 as *const u8 (PtrToPtr); + StorageDead(_57); StorageDead(_37); _35 = &_36; - StorageLive(_38); @@ -237,10 +249,10 @@ + nop; StorageLive(_40); _40 = &(*_29); - _39 = core::slice::::as_ptr(move _40) -> [return: bb9, unwind continue]; - } - - bb9: { + StorageLive(_58); + _58 = &raw const (*_40); + _39 = move _58 as *const u8 (PtrToPtr); + StorageDead(_58); StorageDead(_40); _38 = &_39; - _34 = (move _35, move _38); @@ -264,10 +276,10 @@ - _43 = Eq(move _44, move _45); + _45 = copy _39; + _43 = Eq(copy _36, copy _39); - switchInt(move _43) -> [0: bb11, otherwise: bb10]; + switchInt(move _43) -> [0: 
bb7, otherwise: bb6]; } - bb10: { + bb6: { StorageDead(_45); StorageDead(_44); _33 = const (); @@ -289,7 +301,7 @@ return; } - bb11: { + bb7: { StorageDead(_45); StorageDead(_44); - StorageLive(_47); diff --git a/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff b/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff index f56af33ea603f..16dfbe0c1546e 100644 --- a/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff +++ b/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff @@ -6,27 +6,27 @@ let _1: bool; let mut _2: *mut u8; scope 1 (inlined dangling_mut::) { - scope 2 (inlined NonNull::::dangling) { + scope 2 (inlined NonNull::::as_ptr) { + } + scope 3 (inlined NonNull::::dangling) { let mut _3: std::num::NonZero; - scope 3 { - scope 5 (inlined std::ptr::Alignment::as_nonzero) { + scope 4 { + scope 6 (inlined std::ptr::Alignment::as_nonzero) { } - scope 6 (inlined NonNull::::without_provenance) { - scope 7 { - } - scope 8 (inlined NonZero::::get) { + scope 7 (inlined NonNull::::without_provenance) { + scope 8 { } scope 9 (inlined std::ptr::without_provenance::) { scope 10 (inlined without_provenance_mut::) { } } + scope 11 (inlined NonZero::::get) { + } } } - scope 4 (inlined std::ptr::Alignment::of::) { + scope 5 (inlined std::ptr::Alignment::of::) { } } - scope 11 (inlined NonNull::::as_ptr) { - } } scope 12 (inlined Foo::::cmp_ptr) { let mut _4: *const u8; diff --git a/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-abort.diff b/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-abort.diff index 151580da19e09..0d9be495a0574 100644 --- a/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-abort.diff +++ b/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-abort.diff @@ -11,12 +11,12 @@ scope 1 { debug _r => _1; } -+ scope 2 (inlined g) { -+ } -+ scope 3 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new) { -+ debug pointer => _3; -+ scope 4 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 
20:8}>::new_unchecked) { -+ } + scope 2 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new) { + debug pointer => _3; + scope 3 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new_unchecked) { + } + } ++ scope 4 (inlined g) { + } + scope 5 (inlined g::{closure#0}) { + debug a => _5; @@ -31,10 +31,14 @@ StorageLive(_3); StorageLive(_4); - _4 = g() -> [return: bb1, unwind unreachable]; +- } +- +- bb1: { + _4 = {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8 (#0)}; -+ _3 = &mut _4; -+ _2 = Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}> { pointer: copy _3 }; -+ StorageDead(_3); + _3 = &mut _4; + _2 = Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}> { pointer: copy _3 }; + StorageDead(_3); +- _1 = <{coroutine@$DIR/inline_coroutine.rs:20:5: 20:8} as Coroutine>::resume(move _2, const false) -> [return: bb2, unwind unreachable]; + StorageLive(_5); + _5 = const false; + StorageLive(_6); @@ -44,37 +48,32 @@ + switchInt(move _7) -> [0: bb3, 1: bb7, 3: bb8, otherwise: bb9]; } - bb1: { -- _3 = &mut _4; -- _2 = Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new(move _3) -> [return: bb2, unwind unreachable]; ++ bb1: { + StorageDead(_4); + _0 = const (); + StorageDead(_1); + return; - } - ++ } ++ bb2: { -- StorageDead(_3); -- _1 = <{coroutine@$DIR/inline_coroutine.rs:20:5: 20:8} as Coroutine>::resume(move _2, const false) -> [return: bb3, unwind unreachable]; + StorageDead(_7); + StorageDead(_6); + StorageDead(_5); -+ StorageDead(_2); + StorageDead(_2); +- drop(_4) -> [return: bb3, unwind unreachable]; + drop(_4) -> [return: bb1, unwind unreachable]; } bb3: { -- StorageDead(_2); -- drop(_4) -> [return: bb4, unwind unreachable]; -+ StorageLive(_8); -+ switchInt(copy _5) -> [0: bb4, otherwise: bb5]; - } - - bb4: { - StorageDead(_4); - _0 = const (); - StorageDead(_1); - return; ++ StorageLive(_8); ++ switchInt(copy _5) -> [0: bb4, otherwise: bb5]; ++ } ++ ++ bb4: { + _8 = const 13_i32; + goto -> bb6; + 
} diff --git a/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-unwind.diff b/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-unwind.diff index 6196fc0d0c6bf..55ef77073037f 100644 --- a/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-unwind.diff +++ b/tests/mir-opt/inline/inline_coroutine.main.Inline.panic-unwind.diff @@ -11,12 +11,12 @@ scope 1 { debug _r => _1; } -+ scope 2 (inlined g) { -+ } -+ scope 3 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new) { -+ debug pointer => _3; -+ scope 4 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new_unchecked) { -+ } + scope 2 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new) { + debug pointer => _3; + scope 3 (inlined Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new_unchecked) { + } + } ++ scope 4 (inlined g) { + } + scope 5 (inlined g::{closure#0}) { + debug a => _5; @@ -31,10 +31,14 @@ StorageLive(_3); StorageLive(_4); - _4 = g() -> [return: bb1, unwind continue]; +- } +- +- bb1: { + _4 = {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8 (#0)}; -+ _3 = &mut _4; -+ _2 = Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}> { pointer: copy _3 }; -+ StorageDead(_3); + _3 = &mut _4; + _2 = Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}> { pointer: copy _3 }; + StorageDead(_3); +- _1 = <{coroutine@$DIR/inline_coroutine.rs:20:5: 20:8} as Coroutine>::resume(move _2, const false) -> [return: bb2, unwind: bb4]; + StorageLive(_5); + _5 = const false; + StorageLive(_6); @@ -44,50 +48,43 @@ + switchInt(move _7) -> [0: bb5, 1: bb9, 3: bb10, otherwise: bb11]; } - bb1: { -- _3 = &mut _4; -- _2 = Pin::<&mut {coroutine@$DIR/inline_coroutine.rs:20:5: 20:8}>::new(move _3) -> [return: bb2, unwind: bb5]; -+ StorageDead(_4); -+ _0 = const (); -+ StorageDead(_1); -+ return; +- bb2: { +- StorageDead(_2); +- drop(_4) -> [return: bb3, unwind: bb5]; +- } +- +- bb3: { ++ bb1: { + StorageDead(_4); + _0 = const (); + 
StorageDead(_1); + return; } -- bb2: { -- StorageDead(_3); -- _1 = <{coroutine@$DIR/inline_coroutine.rs:20:5: 20:8} as Coroutine>::resume(move _2, const false) -> [return: bb3, unwind: bb5]; +- bb4 (cleanup): { +- drop(_4) -> [return: bb5, unwind terminate(cleanup)]; + bb2 (cleanup): { + drop(_4) -> [return: bb3, unwind terminate(cleanup)]; } -- bb3: { -- StorageDead(_2); -- drop(_4) -> [return: bb4, unwind: bb6]; +- bb5 (cleanup): { + bb3 (cleanup): { -+ resume; - } - - bb4: { -- StorageDead(_4); -- _0 = const (); -- StorageDead(_1); -- return; + resume; ++ } ++ ++ bb4: { + StorageDead(_7); + StorageDead(_6); + StorageDead(_5); + StorageDead(_2); + drop(_4) -> [return: bb1, unwind: bb3]; - } - -- bb5 (cleanup): { -- drop(_4) -> [return: bb6, unwind terminate(cleanup)]; ++ } ++ + bb5: { + StorageLive(_8); + switchInt(copy _5) -> [0: bb6, otherwise: bb7]; - } - -- bb6 (cleanup): { -- resume; ++ } ++ + bb6: { + _8 = const 13_i32; + goto -> bb8; diff --git a/tests/mir-opt/inline/inline_shims.drop.Inline.panic-abort.diff b/tests/mir-opt/inline/inline_shims.drop.Inline.panic-abort.diff index f6c111a2228a9..56faac5c559b7 100644 --- a/tests/mir-opt/inline/inline_shims.drop.Inline.panic-abort.diff +++ b/tests/mir-opt/inline/inline_shims.drop.Inline.panic-abort.diff @@ -18,19 +18,19 @@ + scope 3 (inlined Vec::::as_mut_ptr) { + scope 4 (inlined alloc::raw_vec::RawVec::::ptr) { + scope 5 (inlined alloc::raw_vec::RawVecInner::ptr::) { -+ scope 6 (inlined alloc::raw_vec::RawVecInner::non_null::) { ++ scope 6 (inlined NonNull::::as_ptr) { ++ } ++ scope 7 (inlined alloc::raw_vec::RawVecInner::non_null::) { + let mut _11: std::ptr::NonNull; -+ scope 7 (inlined Unique::::cast::) { -+ scope 8 (inlined NonNull::::cast::) { -+ scope 9 (inlined NonNull::::as_ptr) { ++ scope 8 (inlined Unique::::cast::) { ++ scope 9 (inlined NonNull::::cast::) { ++ scope 10 (inlined NonNull::::as_ptr) { + } + } + } -+ scope 10 (inlined Unique::::as_non_null_ptr) { ++ scope 11 (inlined 
Unique::::as_non_null_ptr) { + } + } -+ scope 11 (inlined NonNull::::as_ptr) { -+ } + } + } + } diff --git a/tests/mir-opt/inline/unchecked_shifts.rs b/tests/mir-opt/inline/unchecked_shifts.rs index 122f099da4b7d..dc3db4acab7e3 100644 --- a/tests/mir-opt/inline/unchecked_shifts.rs +++ b/tests/mir-opt/inline/unchecked_shifts.rs @@ -7,7 +7,7 @@ // These used to be more interesting when the library had to fix the RHS type. // After MCP#693, though, that's the backend's problem, not something in MIR. -// EMIT_MIR unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.diff +// EMIT_MIR unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.diff // EMIT_MIR unchecked_shifts.unchecked_shl_unsigned_smaller.PreCodegen.after.mir pub unsafe fn unchecked_shl_unsigned_smaller(a: u16, b: u32) -> u16 { // CHECK-LABEL: fn unchecked_shl_unsigned_smaller( @@ -15,7 +15,7 @@ pub unsafe fn unchecked_shl_unsigned_smaller(a: u16, b: u32) -> u16 { a.unchecked_shl(b) } -// EMIT_MIR unchecked_shifts.unchecked_shr_signed_bigger.Inline.diff +// EMIT_MIR unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.diff // EMIT_MIR unchecked_shifts.unchecked_shr_signed_bigger.PreCodegen.after.mir pub unsafe fn unchecked_shr_signed_bigger(a: i64, b: u32) -> i64 { // CHECK-LABEL: fn unchecked_shr_signed_bigger( diff --git a/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-abort.diff b/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-abort.diff similarity index 90% rename from tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-abort.diff rename to tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-abort.diff index 813796657b247..ec7714ff9629f 100644 --- a/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-abort.diff +++ b/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-abort.diff @@ -1,5 +1,5 
@@ -- // MIR for `unchecked_shl_unsigned_smaller` before Inline -+ // MIR for `unchecked_shl_unsigned_smaller` after Inline +- // MIR for `unchecked_shl_unsigned_smaller` before ForceInline ++ // MIR for `unchecked_shl_unsigned_smaller` after ForceInline fn unchecked_shl_unsigned_smaller(_1: u16, _2: u32) -> u16 { debug a => _1; diff --git a/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-unwind.diff b/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-unwind.diff similarity index 90% rename from tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-unwind.diff rename to tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-unwind.diff index 61fdb69f74b70..28445ab85b0e1 100644 --- a/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.Inline.panic-unwind.diff +++ b/tests/mir-opt/inline/unchecked_shifts.unchecked_shl_unsigned_smaller.ForceInline.panic-unwind.diff @@ -1,5 +1,5 @@ -- // MIR for `unchecked_shl_unsigned_smaller` before Inline -+ // MIR for `unchecked_shl_unsigned_smaller` after Inline +- // MIR for `unchecked_shl_unsigned_smaller` before ForceInline ++ // MIR for `unchecked_shl_unsigned_smaller` after ForceInline fn unchecked_shl_unsigned_smaller(_1: u16, _2: u32) -> u16 { debug a => _1; diff --git a/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-abort.diff b/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-abort.diff similarity index 90% rename from tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-abort.diff rename to tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-abort.diff index 5ea99e8301b80..1dfaff799df86 100644 --- a/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-abort.diff +++ 
b/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-abort.diff @@ -1,5 +1,5 @@ -- // MIR for `unchecked_shr_signed_bigger` before Inline -+ // MIR for `unchecked_shr_signed_bigger` after Inline +- // MIR for `unchecked_shr_signed_bigger` before ForceInline ++ // MIR for `unchecked_shr_signed_bigger` after ForceInline fn unchecked_shr_signed_bigger(_1: i64, _2: u32) -> i64 { debug a => _1; diff --git a/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-unwind.diff b/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-unwind.diff similarity index 90% rename from tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-unwind.diff rename to tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-unwind.diff index b13531ab148f2..ac3791ab7eb21 100644 --- a/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.Inline.panic-unwind.diff +++ b/tests/mir-opt/inline/unchecked_shifts.unchecked_shr_signed_bigger.ForceInline.panic-unwind.diff @@ -1,5 +1,5 @@ -- // MIR for `unchecked_shr_signed_bigger` before Inline -+ // MIR for `unchecked_shr_signed_bigger` after Inline +- // MIR for `unchecked_shr_signed_bigger` before ForceInline ++ // MIR for `unchecked_shr_signed_bigger` after ForceInline fn unchecked_shr_signed_bigger(_1: i64, _2: u32) -> i64 { debug a => _1; diff --git a/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-abort.diff b/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-abort.diff index 22e6ea722ddaf..e59d38aeb13aa 100644 --- a/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-abort.diff +++ b/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-abort.diff @@ -55,7 +55,7 @@ + let _26: (); + scope 9 { + } -+ scope 12 (inlined Pin::<&mut std::future::Ready<()>>::new_unchecked) { ++ scope 10 (inlined Pin::<&mut std::future::Ready<()>>::new_unchecked) 
{ + } + scope 13 (inlined as Future>::poll) { + let mut _42: (); @@ -89,18 +89,18 @@ + } + } + } -+ scope 10 (inlined ready::<()>) { ++ scope 11 (inlined ready::<()>) { + let mut _41: std::option::Option<()>; + } -+ scope 11 (inlined as IntoFuture>::into_future) { ++ scope 12 (inlined as IntoFuture>::into_future) { + } + } + } } -+ scope 5 (inlined Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}>::new_unchecked) { -+ } + scope 4 (inlined Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}>::new_unchecked) { + } } -+ scope 4 (inlined ActionPermit::<'_, T>::perform) { ++ scope 5 (inlined ActionPermit::<'_, T>::perform) { + } bb0: { @@ -116,11 +116,7 @@ StorageLive(_4); StorageLive(_5); _5 = &mut _2; -- _4 = Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}>::new_unchecked(move _5) -> [return: bb2, unwind unreachable]; -- } -- -- bb2: { -+ _4 = Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}> { pointer: copy _5 }; + _4 = Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}> { pointer: copy _5 }; StorageDead(_5); StorageLive(_6); StorageLive(_7); @@ -129,7 +125,7 @@ StorageLive(_9); _10 = deref_copy (_1.1: &mut std::task::Context<'_>); _9 = &mut (*_10); -- _7 = <{async fn body of ActionPermit<'_, T>::perform()} as Future>::poll(move _8, move _9) -> [return: bb3, unwind unreachable]; +- _7 = <{async fn body of ActionPermit<'_, T>::perform()} as Future>::poll(move _8, move _9) -> [return: bb2, unwind unreachable]; + StorageLive(_11); + StorageLive(_15); + StorageLive(_16); @@ -151,13 +147,12 @@ + switchInt(move _32) -> [0: bb3, 1: bb10, 3: bb9, otherwise: bb5]; } -- bb3: { + bb1: { + StorageDead(_2); + return; + } + -+ bb2: { + bb2: { + StorageDead(_40); + StorageDead(_39); + StorageDead(_38); @@ -181,11 +176,13 @@ StorageDead(_6); _0 = const (); StorageDead(_4); -- drop(_2) -> [return: bb4, unwind unreachable]; +- drop(_2) -> [return: bb3, unwind unreachable]; + drop(_2) -> [return: bb1, unwind unreachable]; } -+ 
bb3: { + bb3: { +- StorageDead(_2); +- return; + _31 = move _9; + _34 = deref_copy (_8.0: &mut {async fn body of ActionPermit<'_, T>::perform()}); + _35 = deref_copy (_8.0: &mut {async fn body of ActionPermit<'_, T>::perform()}); @@ -204,11 +201,9 @@ + _36 = deref_copy (_8.0: &mut {async fn body of ActionPermit<'_, T>::perform()}); + (((*_36) as variant#3).1: std::future::Ready<()>) = move _12; + goto -> bb4; -+ } + } + - bb4: { -- StorageDead(_2); -- return; ++ bb4: { + StorageLive(_17); + StorageLive(_18); + StorageLive(_19); @@ -247,7 +242,7 @@ + StorageLive(_49); + _49 = discriminant(_43); + switchInt(move _49) -> [0: bb11, 1: bb12, otherwise: bb5]; - } ++ } + + bb5: { + unreachable; diff --git a/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-unwind.diff b/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-unwind.diff index 8b027e988b8e8..977aabebdc97b 100644 --- a/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-unwind.diff +++ b/tests/mir-opt/inline_coroutine_body.run2-{closure#0}.Inline.panic-unwind.diff @@ -57,7 +57,7 @@ + let _26: (); + scope 9 { + } -+ scope 12 (inlined Pin::<&mut std::future::Ready<()>>::new_unchecked) { ++ scope 10 (inlined Pin::<&mut std::future::Ready<()>>::new_unchecked) { + } + scope 13 (inlined as Future>::poll) { + let mut _44: (); @@ -91,25 +91,25 @@ + } + } + } -+ scope 10 (inlined ready::<()>) { ++ scope 11 (inlined ready::<()>) { + let mut _43: std::option::Option<()>; + } -+ scope 11 (inlined as IntoFuture>::into_future) { ++ scope 12 (inlined as IntoFuture>::into_future) { + } + } + } } -+ scope 5 (inlined Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}>::new_unchecked) { -+ } + scope 4 (inlined Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}>::new_unchecked) { + } } -+ scope 4 (inlined ActionPermit::<'_, T>::perform) { ++ scope 5 (inlined ActionPermit::<'_, T>::perform) { + } bb0: { StorageLive(_2); StorageLive(_3); _3 = move (_1.0: 
ActionPermit<'_, T>); -- _2 = ActionPermit::<'_, T>::perform(move _3) -> [return: bb1, unwind: bb6]; +- _2 = ActionPermit::<'_, T>::perform(move _3) -> [return: bb1, unwind: bb5]; - } - - bb1: { @@ -118,11 +118,7 @@ StorageLive(_4); StorageLive(_5); _5 = &mut _2; -- _4 = Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}>::new_unchecked(move _5) -> [return: bb2, unwind: bb5]; -- } -- -- bb2: { -+ _4 = Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}> { pointer: copy _5 }; + _4 = Pin::<&mut {async fn body of ActionPermit<'_, T>::perform()}> { pointer: copy _5 }; StorageDead(_5); StorageLive(_6); StorageLive(_7); @@ -131,7 +127,7 @@ StorageLive(_9); _10 = deref_copy (_1.1: &mut std::task::Context<'_>); _9 = &mut (*_10); -- _7 = <{async fn body of ActionPermit<'_, T>::perform()} as Future>::poll(move _8, move _9) -> [return: bb3, unwind: bb5]; +- _7 = <{async fn body of ActionPermit<'_, T>::perform()} as Future>::poll(move _8, move _9) -> [return: bb2, unwind: bb4]; + StorageLive(_11); + StorageLive(_15); + StorageLive(_16); @@ -155,7 +151,7 @@ + switchInt(move _32) -> [0: bb5, 1: bb15, 2: bb14, 3: bb13, otherwise: bb7]; } -- bb3: { +- bb2: { + bb1: { + StorageDead(_2); + return; @@ -195,11 +191,11 @@ StorageDead(_6); _0 = const (); StorageDead(_4); -- drop(_2) -> [return: bb4, unwind: bb6]; +- drop(_2) -> [return: bb3, unwind: bb5]; + drop(_2) -> [return: bb1, unwind: bb3]; } -- bb4: { +- bb3: { - StorageDead(_2); - return; + bb5: { @@ -223,8 +219,8 @@ + goto -> bb6; } -- bb5 (cleanup): { -- drop(_2) -> [return: bb6, unwind terminate(cleanup)]; +- bb4 (cleanup): { +- drop(_2) -> [return: bb5, unwind terminate(cleanup)]; + bb6: { + StorageLive(_17); + StorageLive(_18); @@ -266,7 +262,7 @@ + switchInt(move _51) -> [0: bb16, 1: bb17, otherwise: bb7]; } -- bb6 (cleanup): { +- bb5 (cleanup): { - resume; + bb7: { + unreachable; diff --git a/tests/mir-opt/issue_101973.inner.GVN.panic-abort.diff b/tests/mir-opt/issue_101973.inner.GVN.panic-abort.diff 
index ac88fe67bb86f..ad3ee9766e8fe 100644 --- a/tests/mir-opt/issue_101973.inner.GVN.panic-abort.diff +++ b/tests/mir-opt/issue_101973.inner.GVN.panic-abort.diff @@ -16,13 +16,13 @@ let mut _11: bool; let mut _12: u32; let mut _13: bool; - scope 1 (inlined imm8) { + scope 1 (inlined core::num::::rotate_right) { + } + scope 2 (inlined imm8) { let mut _14: u32; - scope 2 { + scope 3 { } } - scope 3 (inlined core::num::::rotate_right) { - } bb0: { StorageLive(_2); diff --git a/tests/mir-opt/issue_101973.inner.GVN.panic-unwind.diff b/tests/mir-opt/issue_101973.inner.GVN.panic-unwind.diff index 96c3cae2d334a..001f0d2c7338f 100644 --- a/tests/mir-opt/issue_101973.inner.GVN.panic-unwind.diff +++ b/tests/mir-opt/issue_101973.inner.GVN.panic-unwind.diff @@ -16,13 +16,13 @@ let mut _11: bool; let mut _12: u32; let mut _13: bool; - scope 1 (inlined imm8) { + scope 1 (inlined core::num::::rotate_right) { + } + scope 2 (inlined imm8) { let mut _14: u32; - scope 2 { + scope 3 { } } - scope 3 (inlined core::num::::rotate_right) { - } bb0: { StorageLive(_2); diff --git a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-abort.mir b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-abort.mir index b5c23822162c9..86483638c95f6 100644 --- a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-abort.mir +++ b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-abort.mir @@ -4,20 +4,20 @@ fn num_to_digit(_1: char) -> u32 { debug num => _1; let mut _0: u32; let mut _4: std::option::Option; - scope 1 (inlined char::methods::::is_digit) { + scope 1 (inlined #[track_caller] Option::::unwrap) { + let mut _5: isize; + let mut _6: !; + scope 2 { + } + } + scope 3 (inlined char::methods::::is_digit) { let _2: std::option::Option; - scope 2 (inlined Option::::is_some) { + scope 4 (inlined Option::::is_some) { let mut _3: isize; - scope 3 { + scope 5 { } } } - scope 4 (inlined #[track_caller] 
Option::::unwrap) { - let mut _5: isize; - let mut _6: !; - scope 5 { - } - } bb0: { StorageLive(_2); diff --git a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-unwind.mir b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-unwind.mir index f22b8835735d1..d640b821d6e63 100644 --- a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-unwind.mir +++ b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.32bit.panic-unwind.mir @@ -4,20 +4,20 @@ fn num_to_digit(_1: char) -> u32 { debug num => _1; let mut _0: u32; let mut _4: std::option::Option; - scope 1 (inlined char::methods::::is_digit) { + scope 1 (inlined #[track_caller] Option::::unwrap) { + let mut _5: isize; + let mut _6: !; + scope 2 { + } + } + scope 3 (inlined char::methods::::is_digit) { let _2: std::option::Option; - scope 2 (inlined Option::::is_some) { + scope 4 (inlined Option::::is_some) { let mut _3: isize; - scope 3 { + scope 5 { } } } - scope 4 (inlined #[track_caller] Option::::unwrap) { - let mut _5: isize; - let mut _6: !; - scope 5 { - } - } bb0: { StorageLive(_2); diff --git a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-abort.mir b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-abort.mir index b5c23822162c9..86483638c95f6 100644 --- a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-abort.mir +++ b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-abort.mir @@ -4,20 +4,20 @@ fn num_to_digit(_1: char) -> u32 { debug num => _1; let mut _0: u32; let mut _4: std::option::Option; - scope 1 (inlined char::methods::::is_digit) { + scope 1 (inlined #[track_caller] Option::::unwrap) { + let mut _5: isize; + let mut _6: !; + scope 2 { + } + } + scope 3 (inlined char::methods::::is_digit) { let _2: std::option::Option; - scope 2 (inlined Option::::is_some) { + scope 4 (inlined Option::::is_some) { let mut _3: 
isize; - scope 3 { + scope 5 { } } } - scope 4 (inlined #[track_caller] Option::::unwrap) { - let mut _5: isize; - let mut _6: !; - scope 5 { - } - } bb0: { StorageLive(_2); diff --git a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-unwind.mir b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-unwind.mir index f22b8835735d1..d640b821d6e63 100644 --- a/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-unwind.mir +++ b/tests/mir-opt/issues/issue_59352.num_to_digit.PreCodegen.after.64bit.panic-unwind.mir @@ -4,20 +4,20 @@ fn num_to_digit(_1: char) -> u32 { debug num => _1; let mut _0: u32; let mut _4: std::option::Option; - scope 1 (inlined char::methods::::is_digit) { + scope 1 (inlined #[track_caller] Option::::unwrap) { + let mut _5: isize; + let mut _6: !; + scope 2 { + } + } + scope 3 (inlined char::methods::::is_digit) { let _2: std::option::Option; - scope 2 (inlined Option::::is_some) { + scope 4 (inlined Option::::is_some) { let mut _3: isize; - scope 3 { + scope 5 { } } } - scope 4 (inlined #[track_caller] Option::::unwrap) { - let mut _5: isize; - let mut _6: !; - scope 5 { - } - } bb0: { StorageLive(_2); diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir index ba6ce0ee5286f..30e5773ee82e0 100644 --- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir @@ -26,9 +26,9 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () { } scope 18 (inlined ::deallocate) { let mut _9: *mut u8; - scope 19 (inlined Layout::size) { + scope 19 (inlined NonNull::::as_ptr) { } - scope 20 (inlined NonNull::::as_ptr) { + scope 20 (inlined Layout::size) { } scope 21 (inlined std::alloc::dealloc) { 
let mut _10: usize; diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir index ba6ce0ee5286f..30e5773ee82e0 100644 --- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir @@ -26,9 +26,9 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () { } scope 18 (inlined ::deallocate) { let mut _9: *mut u8; - scope 19 (inlined Layout::size) { + scope 19 (inlined NonNull::::as_ptr) { } - scope 20 (inlined NonNull::::as_ptr) { + scope 20 (inlined Layout::size) { } scope 21 (inlined std::alloc::dealloc) { let mut _10: usize; diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir index ba6ce0ee5286f..30e5773ee82e0 100644 --- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir @@ -26,9 +26,9 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () { } scope 18 (inlined ::deallocate) { let mut _9: *mut u8; - scope 19 (inlined Layout::size) { + scope 19 (inlined NonNull::::as_ptr) { } - scope 20 (inlined NonNull::::as_ptr) { + scope 20 (inlined Layout::size) { } scope 21 (inlined std::alloc::dealloc) { let mut _10: usize; diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir index ba6ce0ee5286f..30e5773ee82e0 100644 --- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir 
+++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir @@ -26,9 +26,9 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () { } scope 18 (inlined ::deallocate) { let mut _9: *mut u8; - scope 19 (inlined Layout::size) { + scope 19 (inlined NonNull::::as_ptr) { } - scope 20 (inlined NonNull::::as_ptr) { + scope 20 (inlined Layout::size) { } scope 21 (inlined std::alloc::dealloc) { let mut _10: usize; diff --git a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-abort.diff b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-abort.diff index c2d144c98c3a3..8f1eeee3c07c3 100644 --- a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-abort.diff +++ b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-abort.diff @@ -17,20 +17,20 @@ scope 2 { debug ptr => _3; } - scope 5 (inlined ::allocate) { - } - scope 6 (inlined #[track_caller] Result::, std::alloc::AllocError>::unwrap) { + scope 5 (inlined #[track_caller] Result::, std::alloc::AllocError>::unwrap) { let mut _12: isize; let _13: std::alloc::AllocError; let mut _14: !; let mut _15: &dyn std::fmt::Debug; let _16: &std::alloc::AllocError; - scope 7 { + scope 6 { } - scope 8 { + scope 7 { } } - scope 9 (inlined NonNull::<[u8]>::as_ptr) { + scope 8 (inlined NonNull::<[u8]>::as_ptr) { + } + scope 9 (inlined ::allocate) { } } scope 3 (inlined #[track_caller] Option::::unwrap) { @@ -75,28 +75,19 @@ _7 = copy _9; StorageLive(_8); - _8 = copy _1; -- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb4, unwind unreachable]; +- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb6, unwind unreachable]; + _8 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}; -+ _6 = 
std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb4, unwind unreachable]; ++ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb6, unwind unreachable]; } bb4: { - StorageDead(_8); - StorageDead(_7); - StorageLive(_12); - StorageLive(_16); - _12 = discriminant(_6); - switchInt(move _12) -> [0: bb6, 1: bb5, otherwise: bb1]; - } - - bb5: { StorageLive(_15); _16 = &_13; _15 = copy _16 as &dyn std::fmt::Debug (PointerCoercion(Unsize, Implicit)); _14 = result::unwrap_failed(const "called `Result::unwrap()` on an `Err` value", move _15) -> unwind unreachable; } - bb6: { + bb5: { _5 = move ((_6 as Ok).0: std::ptr::NonNull<[u8]>); StorageDead(_16); StorageDead(_12); @@ -110,6 +101,15 @@ + nop; return; } + + bb6: { + StorageDead(_8); + StorageDead(_7); + StorageLive(_12); + StorageLive(_16); + _12 = discriminant(_6); + switchInt(move _12) -> [0: bb5, 1: bb4, otherwise: bb1]; + } } - ALLOC0 (size: 43, align: 1) { diff --git a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-unwind.diff b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-unwind.diff index 88bd4628c297a..0d645e117fefe 100644 --- a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-unwind.diff +++ b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.32bit.panic-unwind.diff @@ -17,9 +17,20 @@ scope 2 { debug ptr => _3; } - scope 5 (inlined ::allocate) { + scope 5 (inlined #[track_caller] Result::, std::alloc::AllocError>::unwrap) { + let mut _12: isize; + let _13: std::alloc::AllocError; + let mut _14: !; + let mut _15: &dyn 
std::fmt::Debug; + let _16: &std::alloc::AllocError; + scope 6 { + } + scope 7 { + } } - scope 6 (inlined NonNull::<[u8]>::as_ptr) { + scope 8 (inlined NonNull::<[u8]>::as_ptr) { + } + scope 9 (inlined ::allocate) { } } scope 3 (inlined #[track_caller] Option::::unwrap) { @@ -37,32 +48,20 @@ + _2 = const Option::::None; StorageLive(_10); - _10 = discriminant(_2); -- switchInt(move _10) -> [0: bb3, 1: bb4, otherwise: bb2]; +- switchInt(move _10) -> [0: bb2, 1: bb3, otherwise: bb1]; + _10 = const 0_isize; -+ switchInt(const 0_isize) -> [0: bb3, 1: bb4, otherwise: bb2]; ++ switchInt(const 0_isize) -> [0: bb2, 1: bb3, otherwise: bb1]; } bb1: { - StorageDead(_6); - _4 = copy _5 as *mut [u8] (Transmute); - StorageDead(_5); - _3 = move _4 as *mut u8 (PtrToPtr); - StorageDead(_4); - StorageDead(_3); -- StorageDead(_1); -+ nop; - return; - } - - bb2: { unreachable; } - bb3: { + bb2: { _11 = option::unwrap_failed() -> unwind continue; } - bb4: { + bb3: { - _1 = move ((_2 as Some).0: std::alloc::Layout); + _1 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}; StorageDead(_10); @@ -76,19 +75,55 @@ _7 = copy _9; StorageLive(_8); - _8 = copy _1; -- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb5, unwind continue]; +- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb7, unwind continue]; + _8 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}; -+ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb5, unwind continue]; ++ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: 
Indirect { alloc_id: ALLOC0, offset: Size(4 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x00000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb7, unwind continue]; + } + + bb4: { + StorageLive(_15); + _16 = &_13; + _15 = copy _16 as &dyn std::fmt::Debug (PointerCoercion(Unsize, Implicit)); + _14 = result::unwrap_failed(const "called `Result::unwrap()` on an `Err` value", move _15) -> bb6; } bb5: { + _5 = move ((_6 as Ok).0: std::ptr::NonNull<[u8]>); + StorageDead(_16); + StorageDead(_12); + StorageDead(_6); + _4 = copy _5 as *mut [u8] (Transmute); + StorageDead(_5); + _3 = move _4 as *mut u8 (PtrToPtr); + StorageDead(_4); + StorageDead(_3); +- StorageDead(_1); ++ nop; + return; + } + + bb6 (cleanup): { + resume; + } + + bb7: { StorageDead(_8); StorageDead(_7); - _5 = Result::, std::alloc::AllocError>::unwrap(move _6) -> [return: bb1, unwind continue]; + StorageLive(_12); + StorageLive(_16); + _12 = discriminant(_6); + switchInt(move _12) -> [0: bb5, 1: bb4, otherwise: bb1]; } -+ } -+ + } + +- ALLOC0 (size: 43, align: 1) { + ALLOC0 (size: 8, align: 4) { + 00 00 00 00 __ __ __ __ │ ....░░░░ ++ } ++ ++ ALLOC1 (size: 43, align: 1) { + 0x00 │ 63 61 6c 6c 65 64 20 60 52 65 73 75 6c 74 3a 3a │ called `Result:: + 0x10 │ 75 6e 77 72 61 70 28 29 60 20 6f 6e 20 61 6e 20 │ unwrap()` on an + 0x20 │ 60 45 72 72 60 20 76 61 6c 75 65 │ `Err` value } diff --git a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-abort.diff b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-abort.diff index 8641d2d6fa857..fdd9fdbf19c38 100644 --- a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-abort.diff +++ b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-abort.diff @@ -17,20 +17,20 @@ scope 2 { debug ptr => _3; } - scope 5 (inlined ::allocate) { - } - scope 6 (inlined #[track_caller] Result::, std::alloc::AllocError>::unwrap) { 
+ scope 5 (inlined #[track_caller] Result::, std::alloc::AllocError>::unwrap) { let mut _12: isize; let _13: std::alloc::AllocError; let mut _14: !; let mut _15: &dyn std::fmt::Debug; let _16: &std::alloc::AllocError; - scope 7 { + scope 6 { } - scope 8 { + scope 7 { } } - scope 9 (inlined NonNull::<[u8]>::as_ptr) { + scope 8 (inlined NonNull::<[u8]>::as_ptr) { + } + scope 9 (inlined ::allocate) { } } scope 3 (inlined #[track_caller] Option::::unwrap) { @@ -75,28 +75,19 @@ _7 = copy _9; StorageLive(_8); - _8 = copy _1; -- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb4, unwind unreachable]; +- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb6, unwind unreachable]; + _8 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}; -+ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb4, unwind unreachable]; ++ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb6, unwind unreachable]; } bb4: { - StorageDead(_8); - StorageDead(_7); - StorageLive(_12); - StorageLive(_16); - _12 = discriminant(_6); - switchInt(move _12) -> [0: bb6, 1: bb5, otherwise: bb1]; - } - - bb5: { StorageLive(_15); _16 = &_13; _15 = copy _16 as &dyn std::fmt::Debug (PointerCoercion(Unsize, Implicit)); _14 = result::unwrap_failed(const "called `Result::unwrap()` on an `Err` value", move _15) -> unwind unreachable; } - bb6: { + bb5: { _5 = move ((_6 as Ok).0: std::ptr::NonNull<[u8]>); StorageDead(_16); StorageDead(_12); @@ 
-110,6 +101,15 @@ + nop; return; } + + bb6: { + StorageDead(_8); + StorageDead(_7); + StorageLive(_12); + StorageLive(_16); + _12 = discriminant(_6); + switchInt(move _12) -> [0: bb5, 1: bb4, otherwise: bb1]; + } } - ALLOC0 (size: 43, align: 1) { diff --git a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-unwind.diff b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-unwind.diff index 0c52f1e058367..85876dda56692 100644 --- a/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-unwind.diff +++ b/tests/mir-opt/pre-codegen/issue_117368_print_invalid_constant.main.GVN.64bit.panic-unwind.diff @@ -17,9 +17,20 @@ scope 2 { debug ptr => _3; } - scope 5 (inlined ::allocate) { + scope 5 (inlined #[track_caller] Result::, std::alloc::AllocError>::unwrap) { + let mut _12: isize; + let _13: std::alloc::AllocError; + let mut _14: !; + let mut _15: &dyn std::fmt::Debug; + let _16: &std::alloc::AllocError; + scope 6 { + } + scope 7 { + } } - scope 6 (inlined NonNull::<[u8]>::as_ptr) { + scope 8 (inlined NonNull::<[u8]>::as_ptr) { + } + scope 9 (inlined ::allocate) { } } scope 3 (inlined #[track_caller] Option::::unwrap) { @@ -37,32 +48,20 @@ + _2 = const Option::::None; StorageLive(_10); - _10 = discriminant(_2); -- switchInt(move _10) -> [0: bb3, 1: bb4, otherwise: bb2]; +- switchInt(move _10) -> [0: bb2, 1: bb3, otherwise: bb1]; + _10 = const 0_isize; -+ switchInt(const 0_isize) -> [0: bb3, 1: bb4, otherwise: bb2]; ++ switchInt(const 0_isize) -> [0: bb2, 1: bb3, otherwise: bb1]; } bb1: { - StorageDead(_6); - _4 = copy _5 as *mut [u8] (Transmute); - StorageDead(_5); - _3 = move _4 as *mut u8 (PtrToPtr); - StorageDead(_4); - StorageDead(_3); -- StorageDead(_1); -+ nop; - return; - } - - bb2: { unreachable; } - bb3: { + bb2: { _11 = option::unwrap_failed() -> unwind continue; } - bb4: { + bb3: { - _1 = move ((_2 as Some).0: std::alloc::Layout); + _1 = const Layout {{ size: Indirect 
{ alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}; StorageDead(_10); @@ -76,19 +75,55 @@ _7 = copy _9; StorageLive(_8); - _8 = copy _1; -- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb5, unwind continue]; +- _6 = std::alloc::Global::alloc_impl(move _7, move _8, const false) -> [return: bb7, unwind continue]; + _8 = const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}; -+ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb5, unwind continue]; ++ _6 = std::alloc::Global::alloc_impl(copy _9, const Layout {{ size: Indirect { alloc_id: ALLOC0, offset: Size(8 bytes) }: usize, align: std::ptr::Alignment(Scalar(0x0000000000000000): std::ptr::alignment::AlignmentEnum) }}, const false) -> [return: bb7, unwind continue]; + } + + bb4: { + StorageLive(_15); + _16 = &_13; + _15 = copy _16 as &dyn std::fmt::Debug (PointerCoercion(Unsize, Implicit)); + _14 = result::unwrap_failed(const "called `Result::unwrap()` on an `Err` value", move _15) -> bb6; } bb5: { + _5 = move ((_6 as Ok).0: std::ptr::NonNull<[u8]>); + StorageDead(_16); + StorageDead(_12); + StorageDead(_6); + _4 = copy _5 as *mut [u8] (Transmute); + StorageDead(_5); + _3 = move _4 as *mut u8 (PtrToPtr); + StorageDead(_4); + StorageDead(_3); +- StorageDead(_1); ++ nop; + return; + } + + bb6 (cleanup): { + resume; + } + + bb7: { StorageDead(_8); StorageDead(_7); - _5 = Result::, std::alloc::AllocError>::unwrap(move _6) -> [return: bb1, unwind continue]; + StorageLive(_12); + StorageLive(_16); + _12 = discriminant(_6); + switchInt(move _12) -> [0: bb5, 1: bb4, otherwise: bb1]; 
} -+ } -+ + } + +- ALLOC0 (size: 43, align: 1) { + ALLOC0 (size: 16, align: 8) { + 00 00 00 00 00 00 00 00 __ __ __ __ __ __ __ __ │ ........░░░░░░░░ ++ } ++ ++ ALLOC1 (size: 43, align: 1) { + 0x00 │ 63 61 6c 6c 65 64 20 60 52 65 73 75 6c 74 3a 3a │ called `Result:: + 0x10 │ 75 6e 77 72 61 70 28 29 60 20 6f 6e 20 61 6e 20 │ unwrap()` on an + 0x20 │ 60 45 72 72 60 20 76 61 6c 75 65 │ `Err` value } diff --git a/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-abort.mir index ad1ca5dff43a9..fcb95416fc771 100644 --- a/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-abort.mir @@ -12,17 +12,17 @@ fn slice_ptr_get_unchecked_range(_1: *const [u32], _2: std::ops::Range) - let _6: (); let _7: usize; scope 3 { - scope 6 (inlined core::slice::index::get_offset_len_noubcheck::) { + scope 4 (inlined core::slice::index::get_offset_len_noubcheck::) { let _8: *const u32; - scope 7 { + scope 5 { let _9: *const u32; - scope 8 { + scope 6 { } } } } - scope 4 (inlined std::ptr::const_ptr::::len) { - scope 5 (inlined std::ptr::metadata::<[u32]>) { + scope 7 (inlined std::ptr::const_ptr::::len) { + scope 8 (inlined std::ptr::metadata::<[u32]>) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-unwind.mir index ad1ca5dff43a9..fcb95416fc771 100644 --- a/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_index.slice_ptr_get_unchecked_range.PreCodegen.after.panic-unwind.mir @@ -12,17 +12,17 @@ fn slice_ptr_get_unchecked_range(_1: *const [u32], _2: 
std::ops::Range) - let _6: (); let _7: usize; scope 3 { - scope 6 (inlined core::slice::index::get_offset_len_noubcheck::) { + scope 4 (inlined core::slice::index::get_offset_len_noubcheck::) { let _8: *const u32; - scope 7 { + scope 5 { let _9: *const u32; - scope 8 { + scope 6 { } } } } - scope 4 (inlined std::ptr::const_ptr::::len) { - scope 5 (inlined std::ptr::metadata::<[u32]>) { + scope 7 (inlined std::ptr::const_ptr::::len) { + scope 8 (inlined std::ptr::metadata::<[u32]>) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir index d389e4069d05a..f104756061f5d 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir @@ -71,25 +71,25 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { scope 33 (inlined std::ptr::const_ptr::::cast::<()>) { } } - scope 38 (inlined as PartialEq>::eq) { - let mut _17: *mut T; - let mut _18: *mut T; - scope 39 (inlined NonNull::::as_ptr) { - } - scope 40 (inlined NonNull::::as_ptr) { - } - } - scope 41 (inlined NonNull::::add) { + scope 38 (inlined NonNull::::add) { let mut _20: *const T; let mut _21: *const T; - scope 42 (inlined NonNull::::as_ptr) { + scope 39 (inlined NonNull::::as_ptr) { } } - scope 43 (inlined NonNull::::as_ref::<'_>) { + scope 40 (inlined NonNull::::as_ref::<'_>) { let _25: *const T; + scope 41 (inlined NonNull::::as_ptr) { + } + scope 42 (inlined std::ptr::mut_ptr::::cast_const) { + } + } + scope 43 (inlined as PartialEq>::eq) { + let mut _17: *mut T; + let mut _18: *mut T; scope 44 (inlined NonNull::::as_ptr) { } - scope 45 (inlined std::ptr::mut_ptr::::cast_const) { + scope 45 (inlined NonNull::::as_ptr) { } } } @@ -109,21 +109,21 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { let _9: *const T; scope 7 { } 
- scope 11 (inlined std::ptr::without_provenance::) { - scope 12 (inlined without_provenance_mut::) { + scope 8 (inlined std::ptr::without_provenance::) { + scope 9 (inlined without_provenance_mut::) { } } - scope 13 (inlined NonNull::::as_ptr) { + scope 10 (inlined NonNull::::as_ptr) { } - scope 14 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + scope 11 (inlined #[track_caller] std::ptr::mut_ptr::::add) { } } - scope 8 (inlined NonNull::<[T]>::from_ref) { + scope 12 (inlined NonNull::<[T]>::from_ref) { let mut _4: *const [T]; } - scope 9 (inlined NonNull::<[T]>::cast::) { + scope 13 (inlined NonNull::<[T]>::cast::) { let mut _5: *const T; - scope 10 (inlined NonNull::<[T]>::as_ptr) { + scope 14 (inlined NonNull::<[T]>::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir index 8c5fbda63921f..ca66cd2fa4d3b 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir @@ -34,21 +34,21 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { let _9: *const T; scope 7 { } - scope 11 (inlined std::ptr::without_provenance::) { - scope 12 (inlined without_provenance_mut::) { + scope 8 (inlined std::ptr::without_provenance::) { + scope 9 (inlined without_provenance_mut::) { } } - scope 13 (inlined NonNull::::as_ptr) { + scope 10 (inlined NonNull::::as_ptr) { } - scope 14 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + scope 11 (inlined #[track_caller] std::ptr::mut_ptr::::add) { } } - scope 8 (inlined NonNull::<[T]>::from_ref) { + scope 12 (inlined NonNull::<[T]>::from_ref) { let mut _4: *const [T]; } - scope 9 (inlined NonNull::<[T]>::cast::) { + scope 13 (inlined NonNull::<[T]>::cast::) { let mut _5: *const T; - scope 10 (inlined NonNull::<[T]>::as_ptr) { + scope 14 
(inlined NonNull::<[T]>::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir index 216e05ec5b79c..6ab8559def02d 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir @@ -43,25 +43,25 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 21 (inlined std::ptr::const_ptr::::cast::<()>) { } } - scope 26 (inlined as PartialEq>::eq) { - let mut _16: *mut T; - let mut _17: *mut T; - scope 27 (inlined NonNull::::as_ptr) { - } - scope 28 (inlined NonNull::::as_ptr) { - } - } - scope 29 (inlined NonNull::::add) { + scope 26 (inlined NonNull::::add) { let mut _19: *const T; let mut _20: *const T; - scope 30 (inlined NonNull::::as_ptr) { + scope 27 (inlined NonNull::::as_ptr) { } } - scope 31 (inlined NonNull::::as_ref::<'_>) { + scope 28 (inlined NonNull::::as_ref::<'_>) { let _24: *const T; + scope 29 (inlined NonNull::::as_ptr) { + } + scope 30 (inlined std::ptr::mut_ptr::::cast_const) { + } + } + scope 31 (inlined as PartialEq>::eq) { + let mut _16: *mut T; + let mut _17: *mut T; scope 32 (inlined NonNull::::as_ptr) { } - scope 33 (inlined std::ptr::mut_ptr::::cast_const) { + scope 33 (inlined NonNull::::as_ptr) { } } } @@ -80,21 +80,21 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { let _9: *const T; scope 7 { } - scope 11 (inlined std::ptr::without_provenance::) { - scope 12 (inlined without_provenance_mut::) { + scope 8 (inlined std::ptr::without_provenance::) { + scope 9 (inlined without_provenance_mut::) { } } - scope 13 (inlined NonNull::::as_ptr) { + scope 10 (inlined NonNull::::as_ptr) { } - scope 14 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + scope 11 (inlined #[track_caller] std::ptr::mut_ptr::::add) { } } - scope 8 (inlined NonNull::<[T]>::from_ref) { + scope 12 
(inlined NonNull::<[T]>::from_ref) { let mut _4: *const [T]; } - scope 9 (inlined NonNull::<[T]>::cast::) { + scope 13 (inlined NonNull::<[T]>::cast::) { let mut _5: *const T; - scope 10 (inlined NonNull::<[T]>::as_ptr) { + scope 14 (inlined NonNull::<[T]>::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir index 001023919804a..d1979a11809c6 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir @@ -43,25 +43,25 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 21 (inlined std::ptr::const_ptr::::cast::<()>) { } } - scope 26 (inlined as PartialEq>::eq) { - let mut _16: *mut T; - let mut _17: *mut T; - scope 27 (inlined NonNull::::as_ptr) { - } - scope 28 (inlined NonNull::::as_ptr) { - } - } - scope 29 (inlined NonNull::::add) { + scope 26 (inlined NonNull::::add) { let mut _19: *const T; let mut _20: *const T; - scope 30 (inlined NonNull::::as_ptr) { + scope 27 (inlined NonNull::::as_ptr) { } } - scope 31 (inlined NonNull::::as_ref::<'_>) { + scope 28 (inlined NonNull::::as_ref::<'_>) { let _24: *const T; + scope 29 (inlined NonNull::::as_ptr) { + } + scope 30 (inlined std::ptr::mut_ptr::::cast_const) { + } + } + scope 31 (inlined as PartialEq>::eq) { + let mut _16: *mut T; + let mut _17: *mut T; scope 32 (inlined NonNull::::as_ptr) { } - scope 33 (inlined std::ptr::mut_ptr::::cast_const) { + scope 33 (inlined NonNull::::as_ptr) { } } } @@ -80,21 +80,21 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { let _9: *const T; scope 7 { } - scope 11 (inlined std::ptr::without_provenance::) { - scope 12 (inlined without_provenance_mut::) { + scope 8 (inlined std::ptr::without_provenance::) { + scope 9 (inlined without_provenance_mut::) { } } - scope 13 (inlined NonNull::::as_ptr) { + 
scope 10 (inlined NonNull::::as_ptr) { } - scope 14 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + scope 11 (inlined #[track_caller] std::ptr::mut_ptr::::add) { } } - scope 8 (inlined NonNull::<[T]>::from_ref) { + scope 12 (inlined NonNull::<[T]>::from_ref) { let mut _4: *const [T]; } - scope 9 (inlined NonNull::<[T]>::cast::) { + scope 13 (inlined NonNull::<[T]>::cast::) { let mut _5: *const T; - scope 10 (inlined NonNull::<[T]>::as_ptr) { + scope 14 (inlined NonNull::<[T]>::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir index b09e36223441a..42a9cd993275d 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir @@ -34,21 +34,21 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { let _9: *const T; scope 7 { } - scope 11 (inlined std::ptr::without_provenance::) { - scope 12 (inlined without_provenance_mut::) { + scope 8 (inlined std::ptr::without_provenance::) { + scope 9 (inlined without_provenance_mut::) { } } - scope 13 (inlined NonNull::::as_ptr) { + scope 10 (inlined NonNull::::as_ptr) { } - scope 14 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + scope 11 (inlined #[track_caller] std::ptr::mut_ptr::::add) { } } - scope 8 (inlined NonNull::<[T]>::from_ref) { + scope 12 (inlined NonNull::<[T]>::from_ref) { let mut _4: *const [T]; } - scope 9 (inlined NonNull::<[T]>::cast::) { + scope 13 (inlined NonNull::<[T]>::cast::) { let mut _5: *const T; - scope 10 (inlined NonNull::<[T]>::as_ptr) { + scope 14 (inlined NonNull::<[T]>::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir index 12b54b57b8448..5b018c78b2e99 100644 --- 
a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir @@ -34,21 +34,21 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { let _9: *const T; scope 7 { } - scope 11 (inlined std::ptr::without_provenance::) { - scope 12 (inlined without_provenance_mut::) { + scope 8 (inlined std::ptr::without_provenance::) { + scope 9 (inlined without_provenance_mut::) { } } - scope 13 (inlined NonNull::::as_ptr) { + scope 10 (inlined NonNull::::as_ptr) { } - scope 14 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + scope 11 (inlined #[track_caller] std::ptr::mut_ptr::::add) { } } - scope 8 (inlined NonNull::<[T]>::from_ref) { + scope 12 (inlined NonNull::<[T]>::from_ref) { let mut _4: *const [T]; } - scope 9 (inlined NonNull::<[T]>::cast::) { + scope 13 (inlined NonNull::<[T]>::cast::) { let mut _5: *const T; - scope 10 (inlined NonNull::<[T]>::as_ptr) { + scope 14 (inlined NonNull::<[T]>::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-abort.mir index c0ed0aea1e260..f7726f0b7e6b1 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-abort.mir @@ -28,25 +28,25 @@ fn slice_iter_next(_1: &mut std::slice::Iter<'_, T>) -> Option<&T> { scope 6 (inlined std::ptr::const_ptr::::cast::<()>) { } } - scope 11 (inlined as PartialEq>::eq) { - let mut _5: *mut T; - let mut _6: *mut T; - scope 12 (inlined NonNull::::as_ptr) { - } - scope 13 (inlined NonNull::::as_ptr) { - } - } - scope 14 (inlined NonNull::::add) { + scope 11 (inlined NonNull::::add) { let mut _8: *const T; let mut _9: *const T; - scope 15 (inlined NonNull::::as_ptr) { + scope 12 (inlined NonNull::::as_ptr) { } } - scope 16 (inlined 
NonNull::::as_ref::<'_>) { + scope 13 (inlined NonNull::::as_ref::<'_>) { let _13: *const T; + scope 14 (inlined NonNull::::as_ptr) { + } + scope 15 (inlined std::ptr::mut_ptr::::cast_const) { + } + } + scope 16 (inlined as PartialEq>::eq) { + let mut _5: *mut T; + let mut _6: *mut T; scope 17 (inlined NonNull::::as_ptr) { } - scope 18 (inlined std::ptr::mut_ptr::::cast_const) { + scope 18 (inlined NonNull::::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-unwind.mir index c0ed0aea1e260..f7726f0b7e6b1 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_next.PreCodegen.after.panic-unwind.mir @@ -28,25 +28,25 @@ fn slice_iter_next(_1: &mut std::slice::Iter<'_, T>) -> Option<&T> { scope 6 (inlined std::ptr::const_ptr::::cast::<()>) { } } - scope 11 (inlined as PartialEq>::eq) { - let mut _5: *mut T; - let mut _6: *mut T; - scope 12 (inlined NonNull::::as_ptr) { - } - scope 13 (inlined NonNull::::as_ptr) { - } - } - scope 14 (inlined NonNull::::add) { + scope 11 (inlined NonNull::::add) { let mut _8: *const T; let mut _9: *const T; - scope 15 (inlined NonNull::::as_ptr) { + scope 12 (inlined NonNull::::as_ptr) { } } - scope 16 (inlined NonNull::::as_ref::<'_>) { + scope 13 (inlined NonNull::::as_ref::<'_>) { let _13: *const T; + scope 14 (inlined NonNull::::as_ptr) { + } + scope 15 (inlined std::ptr::mut_ptr::::cast_const) { + } + } + scope 16 (inlined as PartialEq>::eq) { + let mut _5: *mut T; + let mut _6: *mut T; scope 17 (inlined NonNull::::as_ptr) { } - scope 18 (inlined std::ptr::mut_ptr::::cast_const) { + scope 18 (inlined NonNull::::as_ptr) { } } } diff --git a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir 
b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir index 2eee8a97db0d4..d7df044830503 100644 --- a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-abort.mir @@ -13,19 +13,19 @@ fn vec_deref_to_slice(_1: &Vec) -> &[u8] { debug self => _1; scope 4 (inlined alloc::raw_vec::RawVec::::ptr) { scope 5 (inlined alloc::raw_vec::RawVecInner::ptr::) { - scope 6 (inlined alloc::raw_vec::RawVecInner::non_null::) { + scope 6 (inlined NonNull::::as_ptr) { + } + scope 7 (inlined alloc::raw_vec::RawVecInner::non_null::) { let mut _2: std::ptr::NonNull; - scope 7 (inlined Unique::::cast::) { - scope 8 (inlined NonNull::::cast::) { - scope 9 (inlined NonNull::::as_ptr) { + scope 8 (inlined Unique::::cast::) { + scope 9 (inlined NonNull::::cast::) { + scope 10 (inlined NonNull::::as_ptr) { } } } - scope 10 (inlined Unique::::as_non_null_ptr) { + scope 11 (inlined Unique::::as_non_null_ptr) { } } - scope 11 (inlined NonNull::::as_ptr) { - } } } } @@ -33,13 +33,13 @@ fn vec_deref_to_slice(_1: &Vec) -> &[u8] { debug data => _3; debug len => _4; let _5: *const [u8]; - scope 13 (inlined core::ub_checks::check_language_ub) { - scope 14 (inlined core::ub_checks::check_language_ub::runtime) { - } + scope 13 (inlined std::mem::size_of::) { } - scope 15 (inlined std::mem::size_of::) { + scope 14 (inlined std::mem::align_of::) { } - scope 16 (inlined std::mem::align_of::) { + scope 15 (inlined core::ub_checks::check_language_ub) { + scope 16 (inlined core::ub_checks::check_language_ub::runtime) { + } } scope 17 (inlined slice_from_raw_parts::) { debug data => _3; diff --git a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir index 2eee8a97db0d4..d7df044830503 100644 --- 
a/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/vec_deref.vec_deref_to_slice.PreCodegen.after.panic-unwind.mir @@ -13,19 +13,19 @@ fn vec_deref_to_slice(_1: &Vec) -> &[u8] { debug self => _1; scope 4 (inlined alloc::raw_vec::RawVec::::ptr) { scope 5 (inlined alloc::raw_vec::RawVecInner::ptr::) { - scope 6 (inlined alloc::raw_vec::RawVecInner::non_null::) { + scope 6 (inlined NonNull::::as_ptr) { + } + scope 7 (inlined alloc::raw_vec::RawVecInner::non_null::) { let mut _2: std::ptr::NonNull; - scope 7 (inlined Unique::::cast::) { - scope 8 (inlined NonNull::::cast::) { - scope 9 (inlined NonNull::::as_ptr) { + scope 8 (inlined Unique::::cast::) { + scope 9 (inlined NonNull::::cast::) { + scope 10 (inlined NonNull::::as_ptr) { } } } - scope 10 (inlined Unique::::as_non_null_ptr) { + scope 11 (inlined Unique::::as_non_null_ptr) { } } - scope 11 (inlined NonNull::::as_ptr) { - } } } } @@ -33,13 +33,13 @@ fn vec_deref_to_slice(_1: &Vec) -> &[u8] { debug data => _3; debug len => _4; let _5: *const [u8]; - scope 13 (inlined core::ub_checks::check_language_ub) { - scope 14 (inlined core::ub_checks::check_language_ub::runtime) { - } + scope 13 (inlined std::mem::size_of::) { } - scope 15 (inlined std::mem::size_of::) { + scope 14 (inlined std::mem::align_of::) { } - scope 16 (inlined std::mem::align_of::) { + scope 15 (inlined core::ub_checks::check_language_ub) { + scope 16 (inlined core::ub_checks::check_language_ub::runtime) { + } } scope 17 (inlined slice_from_raw_parts::) { debug data => _3; diff --git a/tests/mir-opt/sroa/lifetimes.foo.ScalarReplacementOfAggregates.diff b/tests/mir-opt/sroa/lifetimes.foo.ScalarReplacementOfAggregates.diff index 0d5fcf9ef1432..9674b568a904e 100644 --- a/tests/mir-opt/sroa/lifetimes.foo.ScalarReplacementOfAggregates.diff +++ b/tests/mir-opt/sroa/lifetimes.foo.ScalarReplacementOfAggregates.diff @@ -28,12 +28,13 @@ let mut _29: isize; let mut _30: isize; let mut 
_31: isize; -+ let _32: std::result::Result, ::Err>; -+ let _33: u32; + let mut _32: u32; ++ let _37: std::result::Result, ::Err>; ++ let _38: u32; scope 1 { - debug foo => _1; -+ debug ((foo: Foo).0: std::result::Result, ::Err>) => _32; -+ debug ((foo: Foo).1: u32) => _33; ++ debug ((foo: Foo).0: std::result::Result, ::Err>) => _37; ++ debug ((foo: Foo).1: u32) => _38; let _5: std::result::Result, ::Err>; scope 2 { debug x => _5; @@ -44,12 +45,12 @@ debug x => _8; let _8: std::boxed::Box; let _12: (&std::boxed::Box, &u32); -+ let _34: &std::boxed::Box; -+ let _35: &u32; ++ let _39: &std::boxed::Box; ++ let _40: &u32; scope 5 { - debug args => _12; -+ debug ((args: (&Box, &u32)).0: &std::boxed::Box) => _34; -+ debug ((args: (&Box, &u32)).1: &u32) => _35; ++ debug ((args: (&Box, &u32)).0: &std::boxed::Box) => _39; ++ debug ((args: (&Box, &u32)).1: &u32) => _40; let _15: [core::fmt::rt::Argument<'_>; 2]; scope 6 { debug args => _15; @@ -60,57 +61,50 @@ } } } + scope 7 (inlined Box::::new) { + let mut _33: usize; + let mut _34: usize; + let mut _35: *mut u8; + let mut _36: *const u32; + } bb0: { _28 = const false; - StorageLive(_1); -+ StorageLive(_32); -+ StorageLive(_33); ++ StorageLive(_37); ++ StorageLive(_38); + nop; StorageLive(_2); StorageLive(_3); StorageLive(_4); - _4 = Box::::new(const 5_u32) -> [return: bb1, unwind unreachable]; + StorageLive(_32); + _32 = const 5_u32; + StorageLive(_33); + StorageLive(_34); + StorageLive(_35); + StorageLive(_36); + _33 = SizeOf(u32); + _34 = AlignOf(u32); + _35 = alloc::alloc::exchange_malloc(move _33, move _34) -> [return: bb11, unwind unreachable]; } bb1: { - _3 = move _4 as std::boxed::Box (PointerCoercion(Unsize, Implicit)); - StorageDead(_4); - _2 = Result::, ::Err>::Ok(move _3); - StorageDead(_3); -- _1 = Foo:: { x: move _2, y: const 7_u32 }; -+ _32 = move _2; -+ _33 = const 7_u32; -+ nop; - StorageDead(_2); - StorageLive(_5); - _28 = const true; -- _5 = move (_1.0: std::result::Result, ::Err>); -+ _5 = move _32; - 
StorageLive(_6); -- _6 = copy (_1.1: u32); -+ _6 = copy _33; - _7 = discriminant(_5); - switchInt(move _7) -> [0: bb2, otherwise: bb7]; - } - - bb2: { StorageLive(_8); _8 = move ((_5 as Ok).0: std::boxed::Box); StorageLive(_9); StorageLive(_10); StorageLive(_11); - StorageLive(_12); -+ StorageLive(_34); -+ StorageLive(_35); ++ StorageLive(_39); ++ StorageLive(_40); + nop; StorageLive(_13); _13 = &_8; StorageLive(_14); _14 = &_6; - _12 = (move _13, move _14); -+ _34 = move _13; -+ _35 = move _14; ++ _39 = move _13; ++ _40 = move _14; + nop; StorageDead(_14); StorageDead(_13); @@ -118,22 +112,22 @@ StorageLive(_16); StorageLive(_17); - _26 = deref_copy (_12.0: &std::boxed::Box); -+ _26 = deref_copy _34; ++ _26 = deref_copy _39; _17 = &(*_26); - _16 = core::fmt::rt::Argument::<'_>::new_display::>(move _17) -> [return: bb3, unwind unreachable]; + _16 = core::fmt::rt::Argument::<'_>::new_display::>(move _17) -> [return: bb2, unwind unreachable]; } - bb3: { + bb2: { StorageDead(_17); StorageLive(_18); StorageLive(_19); - _27 = deref_copy (_12.1: &u32); -+ _27 = deref_copy _35; ++ _27 = deref_copy _40; _19 = &(*_27); - _18 = core::fmt::rt::Argument::<'_>::new_display::(move _19) -> [return: bb4, unwind unreachable]; + _18 = core::fmt::rt::Argument::<'_>::new_display::(move _19) -> [return: bb3, unwind unreachable]; } - bb4: { + bb3: { StorageDead(_19); _15 = [move _16, move _18]; StorageDead(_18); @@ -147,59 +141,88 @@ StorageLive(_24); _24 = &_15; _23 = &(*_24); - _11 = core::fmt::rt::>::new_v1::<3, 2>(move _20, move _23) -> [return: bb5, unwind unreachable]; + _11 = core::fmt::rt::>::new_v1::<3, 2>(move _20, move _23) -> [return: bb4, unwind unreachable]; } - bb5: { + bb4: { StorageDead(_24); StorageDead(_23); StorageDead(_21); StorageDead(_20); - _10 = _eprint(move _11) -> [return: bb6, unwind unreachable]; + _10 = _eprint(move _11) -> [return: bb5, unwind unreachable]; } - bb6: { + bb5: { StorageDead(_11); StorageDead(_15); - StorageDead(_12); -+ StorageDead(_34); -+ 
StorageDead(_35); ++ StorageDead(_39); ++ StorageDead(_40); + nop; StorageDead(_10); _9 = const (); StorageDead(_9); _0 = const (); - drop(_8) -> [return: bb8, unwind unreachable]; + drop(_8) -> [return: bb7, unwind unreachable]; } - bb7: { + bb6: { _0 = const (); - goto -> bb9; + goto -> bb8; } - bb8: { + bb7: { StorageDead(_8); - goto -> bb9; + goto -> bb8; } - bb9: { + bb8: { StorageDead(_6); _29 = discriminant(_5); - switchInt(move _29) -> [0: bb10, otherwise: bb11]; + switchInt(move _29) -> [0: bb9, otherwise: bb10]; } - bb10: { + bb9: { _28 = const false; StorageDead(_5); - StorageDead(_1); -+ StorageDead(_32); -+ StorageDead(_33); ++ StorageDead(_37); ++ StorageDead(_38); + nop; return; } + bb10: { + drop(_5) -> [return: bb9, unwind unreachable]; + } + bb11: { - drop(_5) -> [return: bb10, unwind unreachable]; + _4 = ShallowInitBox(move _35, u32); + _36 = copy ((_4.0: std::ptr::Unique).0: std::ptr::NonNull) as *const u32 (Transmute); + (*_36) = move _32; + StorageDead(_36); + StorageDead(_35); + StorageDead(_34); + StorageDead(_33); + StorageDead(_32); + _3 = move _4 as std::boxed::Box (PointerCoercion(Unsize, Implicit)); + StorageDead(_4); + _2 = Result::, ::Err>::Ok(move _3); + StorageDead(_3); +- _1 = Foo:: { x: move _2, y: const 7_u32 }; ++ _37 = move _2; ++ _38 = const 7_u32; ++ nop; + StorageDead(_2); + StorageLive(_5); + _28 = const true; +- _5 = move (_1.0: std::result::Result, ::Err>); ++ _5 = move _37; + StorageLive(_6); +- _6 = copy (_1.1: u32); ++ _6 = copy _38; + _7 = discriminant(_5); + switchInt(move _7) -> [0: bb1, otherwise: bb6]; } }