diff --git a/src/intptrcast.rs b/src/intptrcast.rs
index 83739c48ef..c48ecb3100 100644
--- a/src/intptrcast.rs
+++ b/src/intptrcast.rs
@@ -191,11 +191,9 @@ impl<'mir, 'tcx> GlobalStateInner {
             slack,
         );
 
-        // Remember next base address. Leave a gap of at least 1 to avoid two zero-sized allocations
-        // having the same base address, and to avoid ambiguous provenance for the address between two
-        // allocations (also see https://github.com/rust-lang/unsafe-code-guidelines/issues/313).
-        let size_plus_1 = size.bytes().checked_add(1).unwrap();
-        global_state.next_base_addr = base_addr.checked_add(size_plus_1).unwrap();
+        // Remember next base address. We *do* allow allocations to touch each other,
+        // and ZST allocations to have the same address.
+        global_state.next_base_addr = base_addr.checked_add(size.bytes()).unwrap();
         // Given that `next_base_addr` increases in each allocation, pushing the
         // corresponding tuple keeps `int_to_ptr_map` sorted
         global_state.int_to_ptr_map.push((base_addr, alloc_id));
diff --git a/tests/pass/adjacent-allocs.rs b/tests/pass/adjacent-allocs.rs
index b3483a5b43..f923e3e976 100644
--- a/tests/pass/adjacent-allocs.rs
+++ b/tests/pass/adjacent-allocs.rs
@@ -1,5 +1,35 @@
 // compile-flags: -Zmiri-permissive-provenance
 
+fn ensure_allocs_can_be_adjacent() {
+    for _ in 0..512 {
+        let n = 0u64;
+        let ptr: *const u64 = &n;
+        let ptr2 = {
+            let m = 0u64;
+            &m as *const u64
+        };
+        if ptr.wrapping_add(1) == ptr2 {
+            return;
+        }
+    }
+    panic!("never saw adjacent stack variables?");
+}
+
+fn ensure_zst_allocs_can_be_adjacent() {
+    for _ in 0..512 {
+        let n = ();
+        let ptr: *const () = &n;
+        let ptr2 = {
+            let m = ();
+            &m as *const ()
+        };
+        if ptr == ptr2 {
+            return;
+        }
+    }
+    panic!("never saw adjacent zero-sized stack variables?");
+}
+
 fn test1() {
     // The slack between allocations is random.
     // Loop a few times to hit the zero-slack case.
@@ -42,6 +72,8 @@ fn test2() {
 }
 
 fn main() {
+    ensure_allocs_can_be_adjacent();
+    ensure_zst_allocs_can_be_adjacent();
     test1();
     test2();
 }