diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs
index a18ae79f318df..a4ba04ef8347f 100644
--- a/compiler/rustc_const_eval/src/const_eval/machine.rs
+++ b/compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -289,13 +289,91 @@ impl<'tcx> CompileTimeInterpCx<'tcx> {
             }
             // Other ways of comparing integers and pointers can never be known for sure.
             (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2,
-            // FIXME: return a `1` for when both sides are the same pointer, *except* that
-            // some things (like functions and vtables) do not have stable addresses
-            // so we need to be careful around them (see e.g. #73722).
-            // FIXME: return `0` for at least some comparisons where we can reliably
-            // determine the result of runtime inequality tests at compile-time.
-            // Examples include comparison of addresses in different static items.
-            (Scalar::Ptr(..), Scalar::Ptr(..)) => 2,
+            (Scalar::Ptr(a, _), Scalar::Ptr(b, _)) => {
+                let (a_prov, a_offset) = a.prov_and_relative_offset();
+                let (b_prov, b_offset) = b.prov_and_relative_offset();
+                let a_allocid = a_prov.alloc_id();
+                let b_allocid = b_prov.alloc_id();
+                let a_info = self.get_alloc_info(a_allocid);
+                let b_info = self.get_alloc_info(b_allocid);
+
+                // Check if the pointers cannot be equal due to alignment.
+                if a_info.align > Align::ONE && b_info.align > Align::ONE {
+                    let min_align = Ord::min(a_info.align.bytes(), b_info.align.bytes());
+                    let a_residue = a_offset.bytes() % min_align;
+                    let b_residue = b_offset.bytes() % min_align;
+                    if a_residue != b_residue {
+                        // If the two pointers have different residues modulo their
+                        // common alignment, they cannot be equal.
+                        return interp_ok(0);
+                    }
+                    // The pointers have the same residue modulo their common alignment,
+                    // so they could be equal. Try the other checks.
+                }
+
+                if a_allocid == b_allocid {
+                    match self.tcx.try_get_global_alloc(a_allocid) {
+                        None => 2,
+                        // A static cannot be duplicated, so if two pointers are into the same
+                        // static, they are equal if and only if their offsets into the static
+                        // are equal.
+                        Some(GlobalAlloc::Static(_)) => (a_offset == b_offset) as u8,
+                        // Functions and vtables can be duplicated (and deduplicated), so we
+                        // cannot be sure of the runtime equality of pointers to the same one,
+                        // or of the runtime inequality of pointers to different ones
+                        // (see e.g. #73722).
+                        Some(GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..)) => 2,
+                        // FIXME: Can these be duplicated (or deduplicated)?
+                        Some(GlobalAlloc::Memory(..) | GlobalAlloc::TypeId { .. }) => 2,
+                    }
+                } else {
+                    if let (Some(GlobalAlloc::Static(a_did)), Some(GlobalAlloc::Static(b_did))) = (
+                        self.tcx.try_get_global_alloc(a_allocid),
+                        self.tcx.try_get_global_alloc(b_allocid),
+                    ) {
+                        debug_assert_ne!(
+                            a_did, b_did,
+                            "same static item DefId had two different AllocIds? {a_allocid:?} != {b_allocid:?}, {a_did:?} == {b_did:?}"
+                        );
+
+                        if a_info.size == Size::ZERO || b_info.size == Size::ZERO {
+                            // One or both allocations are zero-sized, so we can't know whether
+                            // the pointers are (in)equal.
+                            // FIXME: Can zero-sized statics be "within" non-zero-sized statics?
+                            // Conservatively we say yes, since that doesn't cause them to
+                            // "overlap" any bytes, but if not, then we could delete this branch
+                            // and have the other branches handle ZST allocations.
+                            2
+                        } else if a_offset > a_info.size || b_offset > b_info.size {
+                            // One or both pointers are out of bounds of their allocation,
+                            // so conservatively say we don't know.
+                            // FIXME: we could reason about how far out of bounds the pointers
+                            // are, e.g. two pointers cannot be equal if their being equal
+                            // would require their statics to overlap.
+                            2
+                        } else if (a_offset == Size::ZERO && b_offset == b_info.size)
+                            || (a_offset == a_info.size && b_offset == Size::ZERO)
+                        {
+                            // The pointers are on opposite ends of different allocations; we
+                            // cannot know if they are equal, since the allocations may end up
+                            // adjacent at runtime.
+                            2
+                        } else {
+                            // The pointers are within (or one past the end of) different
+                            // non-zero-sized static allocations, and they are not at opposite
+                            // ends, so we know they are not equal because statics cannot
+                            // overlap or be deduplicated.
+                            0
+                        }
+                    } else {
+                        // Even if one of them is a static, as per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
+                        // immutable statics can sometimes overlap with other kinds of allocations.
+                        // FIXME: We could be more decisive for mutable statics, which cannot
+                        // overlap with other kinds of allocations.
+                        // FIXME: Can we determine any other cases?
+                        2
+                    }
+                }
+            }
         })
     }
 }
diff --git a/tests/ui/const-ptr/guaranteed_cmp.rs b/tests/ui/const-ptr/guaranteed_cmp.rs
new file mode 100644
index 0000000000000..645ca35598d1d
--- /dev/null
+++ b/tests/ui/const-ptr/guaranteed_cmp.rs
@@ -0,0 +1,171 @@
+//@ build-pass
+//@ edition: 2024
+#![feature(const_raw_ptr_comparison)]
+#![feature(fn_align)]
+// Generally:
+// For any `Some` return, `None` would also be valid, unless otherwise noted.
+// For any `None` return, only `None` is valid, unless otherwise noted.
+
+macro_rules! do_test {
+    ($a:expr, $b:expr, $expected:pat) => {
+        const _: () = {
+            let a: *const _ = $a;
+            let b: *const _ = $b;
+            assert!(matches!(<*const u8>::guaranteed_eq(a.cast(), b.cast()), $expected));
+        };
+    };
+}
+
+#[repr(align(2))]
+struct T(#[allow(unused)] u16);
+
+#[repr(align(2))]
+struct AlignedZst;
+
+static A: T = T(42);
+static B: T = T(42);
+static mut MUT_STATIC: T = T(42);
+static ZST: () = ();
+static ALIGNED_ZST: AlignedZst = AlignedZst;
+static LARGE_WORD_ALIGNED: [usize; 2] = [0, 1];
+static mut MUT_LARGE_WORD_ALIGNED: [usize; 2] = [0, 1];
+
+const FN_PTR: *const () = {
+    fn foo() {}
+    unsafe { std::mem::transmute(foo as fn()) }
+};
+
+const ALIGNED_FN_PTR: *const () = {
+    #[rustc_align(2)]
+    fn aligned_foo() {}
+    unsafe { std::mem::transmute(aligned_foo as fn()) }
+};
+
+// Only on armv5te-* and armv4t-*
+#[cfg(all(
+    target_arch = "arm",
+    not(target_feature = "v6"),
+))]
+const ALIGNED_THUMB_FN_PTR: *const () = {
+    #[rustc_align(2)]
+    #[instruction_set(arm::t32)]
+    fn aligned_thumb_foo() {}
+    unsafe { std::mem::transmute(aligned_thumb_foo as fn()) }
+};
+
+trait Trait {
+    #[allow(unused)]
+    fn method(&self) -> u8;
+}
+impl Trait for u32 {
+    fn method(&self) -> u8 { 1 }
+}
+impl Trait for i32 {
+    fn method(&self) -> u8 { 2 }
+}
+
+const VTABLE_PTR_1: *const () = {
+    let [_data, vtable] = unsafe {
+        std::mem::transmute::<&dyn Trait, [*const (); 2]>(&42_u32 as &dyn Trait)
+    };
+    vtable
+};
+const VTABLE_PTR_2: *const () = {
+    let [_data, vtable] = unsafe {
+        std::mem::transmute::<&dyn Trait, [*const (); 2]>(&42_i32 as &dyn Trait)
+    };
+    vtable
+};
+
+// Cannot be `None`: statics, references, and `fn` pointers cannot be null, and `is_null`
+// (which is stable with strong guarantees) is implemented using `guaranteed_cmp`.
+do_test!(&A, std::ptr::null::<()>(), Some(false));
+do_test!(&ZST, std::ptr::null::<()>(), Some(false));
+do_test!(&(), std::ptr::null::<()>(), Some(false));
+do_test!(const { &() }, std::ptr::null::<()>(), Some(false));
+do_test!(FN_PTR, std::ptr::null::<()>(), Some(false));
+
+// Statics cannot be duplicated.
+do_test!(&A, &A, Some(true));
+
+// Two non-ZST statics cannot have the same address.
+do_test!(&A, &B, Some(false));
+do_test!(&A, &raw const MUT_STATIC, Some(false));
+
+// One-past-the-end of one static can be equal to the address of another static.
+do_test!(&A, (&raw const B).wrapping_add(1), None);
+
+// Cannot know whether a ZST static is at the same address as anything non-null
+// (if alignment allows).
+do_test!(&A, &ZST, None);
+do_test!(&A, &ALIGNED_ZST, None);
+
+// Unclear if ZST statics can be placed "in the middle of" non-ZST statics.
+// For now, we conservatively say they could, and return None here.
+do_test!(&ZST, (&raw const A).wrapping_byte_add(1), None);
+
+// As per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
+// immutable statics are allowed to overlap with const items and promoteds.
+do_test!(&A, &T(42), None);
+do_test!(&A, const { &T(42) }, None);
+do_test!(&A, { const X: T = T(42); &X }, None);
+
+// These could return Some(false), since only immutable statics can overlap with const items
+// and promoteds.
+do_test!(&raw const MUT_STATIC, &T(42), None);
+do_test!(&raw const MUT_STATIC, const { &T(42) }, None);
+do_test!(&raw const MUT_STATIC, { const X: T = T(42); &X }, None);
+
+// An odd offset from a 2-aligned allocation can never be equal to an even offset from a
+// 2-aligned allocation, even if the offsets are out-of-bounds.
+do_test!(&A, (&raw const B).wrapping_byte_add(1), Some(false));
+do_test!(&A, (&raw const B).wrapping_byte_add(5), Some(false));
+do_test!(&A, (&raw const ALIGNED_ZST).wrapping_byte_add(1), Some(false));
+do_test!(&ALIGNED_ZST, (&raw const A).wrapping_byte_add(1), Some(false));
+do_test!(&A, (&T(42) as *const T).wrapping_byte_add(1), Some(false));
+do_test!(&A, (const { &T(42) } as *const T).wrapping_byte_add(1), Some(false));
+do_test!(&A, ({ const X: T = T(42); &X } as *const T).wrapping_byte_add(1), Some(false));
+
+// Pointers into the same static are equal if and only if their offsets are the same,
+// even if either is out-of-bounds.
+do_test!(&A, &A, Some(true));
+do_test!(&A, &A.0, Some(true));
+do_test!(&A, (&raw const A).wrapping_byte_add(1), Some(false));
+do_test!(&A, (&raw const A).wrapping_byte_add(2), Some(false));
+do_test!(&A, (&raw const A).wrapping_byte_add(51), Some(false));
+do_test!((&raw const A).wrapping_byte_add(51), (&raw const A).wrapping_byte_add(51), Some(true));
+
+// Pointers to the same fn may be unequal, since `fn`s can be duplicated.
+do_test!(FN_PTR, FN_PTR, None);
+do_test!(ALIGNED_FN_PTR, ALIGNED_FN_PTR, None);
+
+// Pointers to different fns may be equal, since `fn`s can be deduplicated.
+do_test!(FN_PTR, ALIGNED_FN_PTR, None);
+
+// Pointers to the same vtable may be unequal, since vtables can be duplicated.
+do_test!(VTABLE_PTR_1, VTABLE_PTR_1, None);
+
+// Pointers to different vtables may be equal, since vtables can be deduplicated.
+do_test!(VTABLE_PTR_1, VTABLE_PTR_2, None);
+
+// Function pointers to aligned function allocations are not necessarily actually aligned,
+// due to platform-specific semantics.
+// See https://github.com/rust-lang/rust/issues/144661
+// FIXME: This could return `Some` on platforms where function pointers' addresses actually
+// correspond to function addresses including alignment, or on ARM if t32 function pointers
+// have their low bit set for consteval.
+do_test!(ALIGNED_FN_PTR, ALIGNED_FN_PTR.wrapping_byte_offset(1), None);
+#[cfg(all(
+    target_arch = "arm",
+    not(target_feature = "v6"),
+))]
+do_test!(ALIGNED_THUMB_FN_PTR, ALIGNED_THUMB_FN_PTR.wrapping_byte_offset(1), None);
+
+// Conservatively say we don't know.
+do_test!(FN_PTR, VTABLE_PTR_1, None);
+do_test!((&raw const LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), VTABLE_PTR_1, None);
+do_test!((&raw const MUT_LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), VTABLE_PTR_1, None);
+do_test!((&raw const LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), FN_PTR, None);
+do_test!((&raw const MUT_LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), FN_PTR, None);
+
+fn main() {}
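
Not part of the patch: a minimal standalone sketch of the alignment-residue argument that the new `(Scalar::Ptr(..), Scalar::Ptr(..))` arm relies on. The helper name and the concrete offsets below are illustrative only, not taken from the compiler sources.

// Mirrors the alignment check in `guaranteed_cmp`: if both allocations are aligned to
// more than one byte, every runtime address within an allocation is congruent to the
// pointer's offset modulo the smaller of the two alignments, so differing residues rule
// out equality regardless of where the allocations are placed.
fn residues_rule_out_equality(a_offset: u64, b_offset: u64, a_align: u64, b_align: u64) -> bool {
    if a_align > 1 && b_align > 1 {
        let min_align = a_align.min(b_align);
        a_offset % min_align != b_offset % min_align
    } else {
        // With a 1-aligned allocation the residue argument gives no information.
        false
    }
}

fn main() {
    // Offset 0 vs. byte offset 1 into two 2-aligned statics: residues differ, so the
    // pointers can never be equal, matching the `Some(false)` cases in the test above.
    assert!(residues_rule_out_equality(0, 1, 2, 2));
    // Offsets 0 and 2 share a residue, so this check alone cannot decide (in)equality.
    assert!(!residues_rule_out_equality(0, 2, 2, 2));
}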