Unrolled build for rust-lang#128531
Rollup merge of rust-lang#128531 - RalfJung:miri-recursive-validity, r=saethlin

Miri: add a flag to do recursive validity checking

The point of this flag is to allow gathering experimental data for rust-lang/unsafe-code-guidelines#412.
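A minimal example of the kind of program the flag is meant to exercise (run with `MIRIFLAGS=-Zmiri-recursive-validation cargo miri run`; whether code like this is undefined behavior is exactly the open question in unsafe-code-guidelines#412, so the result is experimental data rather than a settled verdict):

fn main() {
    let byte = 3u8;
    // The reference itself is non-null, aligned and dereferenceable, so the
    // default (shallow) validity check accepts it. Only a recursive check
    // looks through the reference and notices that the pointee is not a
    // valid `bool` (which must be 0 or 1).
    let r: &bool = unsafe { std::mem::transmute(&byte) };
    let _copy = r;
}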
rust-timer authored Aug 4, 2024
2 parents 58fb508 + 21c0251 commit 6393034
Showing 14 changed files with 186 additions and 107 deletions.
2 changes: 1 addition & 1 deletion compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -396,7 +396,7 @@ fn const_validate_mplace<'tcx>(
let alloc_id = mplace.ptr().provenance.unwrap().alloc_id();
let mut ref_tracking = RefTracking::new(mplace.clone());
let mut inner = false;
while let Some((mplace, path)) = ref_tracking.todo.pop() {
while let Some((mplace, path)) = ref_tracking.next() {
let mode = match ecx.tcx.static_mutability(cid.instance.def_id()) {
_ if cid.promoted.is_some() => CtfeValidationMode::Promoted,
Some(mutbl) => CtfeValidationMode::Static { mutbl }, // a `static`
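The `ref_tracking.next()` call above drains a deduplicated worklist (defined in validity.rs further down). A self-contained sketch of that pattern, using std's `HashSet` instead of the compiler's `FxHashSet` and dropping the `PATH` payload:

use std::collections::HashSet;
use std::hash::Hash;

// Illustrative stand-in for `RefTracking`: a `seen` set for deduplication
// plus a `todo` stack that is drained via `next()`.
struct Worklist<T> {
    seen: HashSet<T>,
    todo: Vec<T>,
}

impl<T: Clone + Eq + Hash> Worklist<T> {
    fn new() -> Self {
        Worklist { seen: HashSet::new(), todo: Vec::new() }
    }
    fn track(&mut self, item: T) {
        // Only enqueue items that have not been visited before.
        if self.seen.insert(item.clone()) {
            self.todo.push(item);
        }
    }
    fn next(&mut self) -> Option<T> {
        self.todo.pop()
    }
}

fn main() {
    let mut wl = Worklist::new();
    wl.track("a");
    wl.track("b");
    wl.track("a"); // duplicate, ignored
    while let Some(item) = wl.next() {
        println!("visiting {item}");
    }
}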
7 changes: 7 additions & 0 deletions compiler/rustc_const_eval/src/interpret/machine.rs
@@ -165,6 +165,13 @@ pub trait Machine<'tcx>: Sized {

/// Whether to enforce the validity invariant for a specific layout.
fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;
/// Whether to enforce the validity invariant *recursively*.
fn enforce_validity_recursively(
_ecx: &InterpCx<'tcx, Self>,
_layout: TyAndLayout<'tcx>,
) -> bool {
false
}

/// Whether function calls should be [ABI](CallAbi)-checked.
fn enforce_abi(_ecx: &InterpCx<'tcx, Self>) -> bool {
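The new hook defaults to `false`, so the CTFE machine and any other existing `Machine` implementation keep shallow checking unless they opt in. A self-contained sketch of that design (the trait and type names here are illustrative, not the actual `rustc_const_eval` API):

// Illustrative trait mirroring the default-method pattern above.
trait Machine {
    fn enforce_validity(&self) -> bool;

    // Conservative default: no recursion unless a machine opts in.
    fn enforce_validity_recursively(&self) -> bool {
        false
    }
}

struct CtfeLikeMachine;
impl Machine for CtfeLikeMachine {
    fn enforce_validity(&self) -> bool { true }
    // Keeps the default, so const-eval behaviour is unchanged.
}

struct MiriLikeMachine { recursive: bool }
impl Machine for MiriLikeMachine {
    fn enforce_validity(&self) -> bool { true }
    fn enforce_validity_recursively(&self) -> bool { self.recursive }
}

fn main() {
    assert!(!CtfeLikeMachine.enforce_validity_recursively());
    let miri = MiriLikeMachine { recursive: true };
    assert!(miri.enforce_validity_recursively());
}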
5 changes: 4 additions & 1 deletion compiler/rustc_const_eval/src/interpret/memory.rs
@@ -1006,8 +1006,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
})
}

/// Runs the close in "validation" mode, which means the machine's memory read hooks will be
/// Runs the closure in "validation" mode, which means the machine's memory read hooks will be
/// suppressed. Needless to say, this must only be set with great care! Cannot be nested.
///
/// We do this so Miri's allocation access tracking does not show the validation
/// reads as spurious accesses.
pub(super) fn run_for_validation<R>(&self, f: impl FnOnce() -> R) -> R {
// This deliberately uses `==` on `bool` to follow the pattern
// `assert!(val.replace(new) == old)`.
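`run_for_validation` flips a flag around the closure so that validation reads do not show up as real accesses, and the `assert!(val.replace(new) == old)` idiom rules out nesting. A minimal sketch of that pattern with a `Cell<bool>` (a sketch under the assumption of a simple boolean flag; the real wiring lives in the interpreter's memory code and machine hooks):

use std::cell::Cell;

struct Validator {
    validation_in_progress: Cell<bool>,
}

impl Validator {
    fn run_for_validation<R>(&self, f: impl FnOnce() -> R) -> R {
        // `replace` returns the previous value; it must have been `false`,
        // otherwise validation runs would be nested.
        assert!(self.validation_in_progress.replace(true) == false);
        let result = f();
        assert!(self.validation_in_progress.replace(false) == true);
        result
    }
}

fn main() {
    let v = Validator { validation_in_progress: Cell::new(false) };
    let answer = v.run_for_validation(|| 6 * 7);
    println!("{answer}");
}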
15 changes: 12 additions & 3 deletions compiler/rustc_const_eval/src/interpret/place.rs
@@ -572,7 +572,10 @@ where

if M::enforce_validity(self, dest.layout()) {
// Data got changed, better make sure it matches the type!
self.validate_operand(&dest.to_op(self)?)?;
self.validate_operand(
&dest.to_op(self)?,
M::enforce_validity_recursively(self, dest.layout()),
)?;
}

Ok(())
@@ -811,15 +814,21 @@ where
// Generally for transmutation, data must be valid both at the old and new type.
// But if the types are the same, the 2nd validation below suffices.
if src.layout().ty != dest.layout().ty && M::enforce_validity(self, src.layout()) {
self.validate_operand(&src.to_op(self)?)?;
self.validate_operand(
&src.to_op(self)?,
M::enforce_validity_recursively(self, src.layout()),
)?;
}

// Do the actual copy.
self.copy_op_no_validate(src, dest, allow_transmute)?;

if validate_dest && M::enforce_validity(self, dest.layout()) {
// Data got changed, better make sure it matches the type!
self.validate_operand(&dest.to_op(self)?)?;
self.validate_operand(
&dest.to_op(self)?,
M::enforce_validity_recursively(self, dest.layout()),
)?;
}

Ok(())
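The destination check after a type-changing copy is what rejects transmutes to a type the bytes do not inhabit. A small example (not part of the patch) that Miri reports as UB even without the new flag, because the invalid value sits directly in the destination rather than behind a reference:

use std::num::NonZeroU8;

fn main() {
    let zero = 0u8;
    // 0 is a valid `u8` but not a valid `NonZeroU8`; validating the
    // destination after the transmuting copy catches this immediately.
    let bad: NonZeroU8 = unsafe { std::mem::transmute(zero) };
    println!("{bad}");
}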
176 changes: 104 additions & 72 deletions compiler/rustc_const_eval/src/interpret/validity.rs
@@ -155,8 +155,8 @@ impl CtfeValidationMode {

/// State for tracking recursive validation of references
pub struct RefTracking<T, PATH = ()> {
pub seen: FxHashSet<T>,
pub todo: Vec<(T, PATH)>,
seen: FxHashSet<T>,
todo: Vec<(T, PATH)>,
}

impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
@@ -169,8 +169,11 @@ impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH>
ref_tracking_for_consts.seen.insert(op);
ref_tracking_for_consts
}
pub fn next(&mut self) -> Option<(T, PATH)> {
self.todo.pop()
}

pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
if self.seen.insert(op.clone()) {
trace!("Recursing below ptr {:#?}", op);
let path = path();
@@ -435,95 +438,112 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
if self.ecx.scalar_may_be_null(Scalar::from_maybe_pointer(place.ptr(), self.ecx))? {
throw_validation_failure!(self.path, NullPtr { ptr_kind })
}
// Do not allow pointers to uninhabited types.
// Do not allow references to uninhabited types.
if place.layout.abi.is_uninhabited() {
let ty = place.layout.ty;
throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })
}
// Recursive checking
if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
// Determine whether this pointer expects to be pointing to something mutable.
let ptr_expected_mutbl = match ptr_kind {
PointerKind::Box => Mutability::Mut,
PointerKind::Ref(mutbl) => {
// We do not take into account interior mutability here since we cannot know if
// there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
// that in the recursive descent behind this reference (controlled by
// `allow_immutable_unsafe_cell`).
mutbl
}
};
// Proceed recursively even for ZST, no reason to skip them!
// `!` is a ZST and we want to validate it.
if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr(), 0) {
if let Some(ctfe_mode) = self.ctfe_mode {
let mut skip_recursive_check = false;
if let Some(GlobalAlloc::Static(did)) = self.ecx.tcx.try_get_global_alloc(alloc_id)
// CTFE imposes restrictions on what references can point to.
if let Ok((alloc_id, _offset, _prov)) =
self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
{
let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else { bug!() };
// Special handling for pointers to statics (irrespective of their type).
assert!(!self.ecx.tcx.is_thread_local_static(did));
assert!(self.ecx.tcx.is_static(did));
// Mode-specific checks
match self.ctfe_mode {
Some(
CtfeValidationMode::Static { .. } | CtfeValidationMode::Promoted { .. },
) => {
// We skip recursively checking other statics. These statics must be sound by
// themselves, and the only way to get broken statics here is by using
// unsafe code.
// The reasons we don't check other statics is twofold. For one, in all
// sound cases, the static was already validated on its own, and second, we
// trigger cycle errors if we try to compute the value of the other static
// and that static refers back to us (potentially through a promoted).
// This could miss some UB, but that's fine.
// We still walk nested allocations, as they are fundamentally part of this validation run.
// This means we will also recurse into nested statics of *other*
// statics, even though we do not recurse into other statics directly.
// That's somewhat inconsistent but harmless.
skip_recursive_check = !nested;
}
Some(CtfeValidationMode::Const { .. }) => {
// We can't recursively validate `extern static`, so we better reject them.
if self.ecx.tcx.is_foreign_item(did) {
throw_validation_failure!(self.path, ConstRefToExtern);
if let Some(GlobalAlloc::Static(did)) =
self.ecx.tcx.try_get_global_alloc(alloc_id)
{
let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
bug!()
};
// Special handling for pointers to statics (irrespective of their type).
assert!(!self.ecx.tcx.is_thread_local_static(did));
assert!(self.ecx.tcx.is_static(did));
// Mode-specific checks
match ctfe_mode {
CtfeValidationMode::Static { .. }
| CtfeValidationMode::Promoted { .. } => {
// We skip recursively checking other statics. These statics must be sound by
// themselves, and the only way to get broken statics here is by using
// unsafe code.
// The reasons we don't check other statics is twofold. For one, in all
// sound cases, the static was already validated on its own, and second, we
// trigger cycle errors if we try to compute the value of the other static
// and that static refers back to us (potentially through a promoted).
// This could miss some UB, but that's fine.
// We still walk nested allocations, as they are fundamentally part of this validation run.
// This means we will also recurse into nested statics of *other*
// statics, even though we do not recurse into other statics directly.
// That's somewhat inconsistent but harmless.
skip_recursive_check = !nested;
}
CtfeValidationMode::Const { .. } => {
// We can't recursively validate `extern static`, so we better reject them.
if self.ecx.tcx.is_foreign_item(did) {
throw_validation_failure!(self.path, ConstRefToExtern);
}
}
}
None => {}
}
}

// Dangling and Mutability check.
let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
if alloc_kind == AllocKind::Dead {
// This can happen for zero-sized references. We can't have *any* references to non-existing
// allocations though, interning rejects them all as the rest of rustc isn't happy with them...
// so we throw an error, even though this isn't really UB.
// A potential future alternative would be to resurrect this as a zero-sized allocation
// (which codegen will then compile to an aligned dummy pointer anyway).
throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
}
// If this allocation has size zero, there is no actual mutability here.
if size != Size::ZERO {
let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
// Mutable pointer to immutable memory is no good.
if ptr_expected_mutbl == Mutability::Mut
&& alloc_actual_mutbl == Mutability::Not
{
throw_validation_failure!(self.path, MutableRefToImmutable);
// Dangling and Mutability check.
let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
if alloc_kind == AllocKind::Dead {
// This can happen for zero-sized references. We can't have *any* references to
// non-existing allocations in const-eval though, interning rejects them all as
// the rest of rustc isn't happy with them... so we throw an error, even though
// this isn't really UB.
// A potential future alternative would be to resurrect this as a zero-sized allocation
// (which codegen will then compile to an aligned dummy pointer anyway).
throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
}
// In a const, everything must be completely immutable.
if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
// If this allocation has size zero, there is no actual mutability here.
if size != Size::ZERO {
// Determine whether this pointer expects to be pointing to something mutable.
let ptr_expected_mutbl = match ptr_kind {
PointerKind::Box => Mutability::Mut,
PointerKind::Ref(mutbl) => {
// We do not take into account interior mutability here since we cannot know if
// there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
// that in the recursive descent behind this reference (controlled by
// `allow_immutable_unsafe_cell`).
mutbl
}
};
// Determine what it actually points to.
let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
// Mutable pointer to immutable memory is no good.
if ptr_expected_mutbl == Mutability::Mut
|| alloc_actual_mutbl == Mutability::Mut
&& alloc_actual_mutbl == Mutability::Not
{
throw_validation_failure!(self.path, ConstRefToMutable);
throw_validation_failure!(self.path, MutableRefToImmutable);
}
// In a const, everything must be completely immutable.
if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
if ptr_expected_mutbl == Mutability::Mut
|| alloc_actual_mutbl == Mutability::Mut
{
throw_validation_failure!(self.path, ConstRefToMutable);
}
}
}
}
// Potentially skip recursive check.
if skip_recursive_check {
return Ok(());
}
} else {
// This is not CTFE, so it's Miri with recursive checking.
// FIXME: we do *not* check behind boxes, since creating a new box first creates it uninitialized
// and then puts the value in there, so briefly we have a box with uninit contents.
// FIXME: should we also skip `UnsafeCell` behind shared references? Currently that is not
// needed since validation reads bypass Stacked Borrows and data race checks.
if matches!(ptr_kind, PointerKind::Box) {
return Ok(());
}
}
let path = &self.path;
ref_tracking.track(place, || {
@@ -1072,11 +1092,23 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
/// `op` is assumed to cover valid memory if it is an indirect operand.
/// It will error if the bits at the destination do not match the ones described by the layout.
#[inline(always)]
pub fn validate_operand(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
pub fn validate_operand(
&self,
op: &OpTy<'tcx, M::Provenance>,
recursive: bool,
) -> InterpResult<'tcx> {
// Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
// still correct to not use `ctfe_mode`: that mode is for validation of the final constant
// value, it rules out things like `UnsafeCell` in awkward places. It also can make checking
// recurse through references which, for now, we don't want here, either.
self.validate_operand_internal(op, vec![], None, None)
// value, it rules out things like `UnsafeCell` in awkward places.
if !recursive {
return self.validate_operand_internal(op, vec![], None, None);
}
// Do a recursive check.
let mut ref_tracking = RefTracking::empty();
self.validate_operand_internal(op, vec![], Some(&mut ref_tracking), None)?;
while let Some((mplace, path)) = ref_tracking.todo.pop() {
self.validate_operand_internal(&mplace.into(), path, Some(&mut ref_tracking), None)?;
}
Ok(())
}
}
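With `recursive` set, the loop above keeps validating places discovered behind references until the worklist is empty. One effect (assuming the flag is on, and keeping in mind that whether this is UB is part of the same open question) is that problems one indirection away become visible, for example a dangling reference stored behind a perfectly fine outer reference:

fn main() {
    let dangling: *const u8 = {
        let local = 0u8;
        &local as *const u8
    }; // `local` is dead here, so `dangling` points to freed stack memory

    // The outer reference is valid; only the recursive walk reads its pointee,
    // interprets it as a `&u8`, and flags the inner dangling reference.
    let nested: &&u8 = unsafe { std::mem::transmute(&dangling) };
    let _use = nested;
}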
@@ -67,7 +67,7 @@ fn might_permit_raw_init_strict<'tcx>(
// This does *not* actually check that references are dereferenceable, but since all types that
// require dereferenceability also require non-null, we don't actually get any false negatives
// due to this.
Ok(cx.validate_operand(&ot).is_ok())
Ok(cx.validate_operand(&ot, /*recursive*/ false).is_ok())
}

/// Implements the 'lax' (default) version of the `might_permit_raw_init` checks; see that function for
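For context (unchanged by this patch apart from the new argument): these strict/lax init checks back the runtime guards used by `mem::zeroed` and `mem::uninitialized`. For example, zero-initializing a reference type is always rejected:

use std::mem;

fn main() {
    // A zeroed `&u8` would be a null reference, which can never be valid,
    // so this panics at runtime with an "attempted to zero-initialize" message.
    let _r: &u8 = unsafe { mem::zeroed() };
}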
2 changes: 2 additions & 0 deletions src/tools/miri/README.md
@@ -398,6 +398,8 @@ to Miri failing to detect cases of undefined behavior in a program.
application instead of raising an error within the context of Miri (and halting
execution). Note that code might not expect these operations to ever panic, so
this flag can lead to strange (mis)behavior.
* `-Zmiri-recursive-validation` is a *highly experimental* flag that makes validity checking
recurse below references.
* `-Zmiri-retag-fields[=<all|none|scalar>]` controls when Stacked Borrows retagging recurses into
fields. `all` means it always recurses (the default, and equivalent to `-Zmiri-retag-fields`
without an explicit value), `none` means it never recurses, `scalar` means it only recurses for
6 changes: 4 additions & 2 deletions src/tools/miri/src/bin/miri.rs
@@ -44,7 +44,7 @@ use rustc_session::config::{CrateType, ErrorOutputType, OptLevel};
use rustc_session::search_paths::PathKind;
use rustc_session::{CtfeBacktrace, EarlyDiagCtxt};

use miri::{BacktraceStyle, BorrowTrackerMethod, ProvenanceMode, RetagFields};
use miri::{BacktraceStyle, BorrowTrackerMethod, ProvenanceMode, RetagFields, ValidationMode};

struct MiriCompilerCalls {
miri_config: miri::MiriConfig,
@@ -421,7 +421,9 @@ fn main() {
} else if arg == "--" {
after_dashdash = true;
} else if arg == "-Zmiri-disable-validation" {
miri_config.validate = false;
miri_config.validation = ValidationMode::No;
} else if arg == "-Zmiri-recursive-validation" {
miri_config.validation = ValidationMode::Deep;
} else if arg == "-Zmiri-disable-stacked-borrows" {
miri_config.borrow_tracker = None;
} else if arg == "-Zmiri-tree-borrows" {
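The new flag maps onto a `ValidationMode` value in the Miri config. A self-contained sketch of that shape (only `No` and `Deep` appear in this diff; the `Shallow` default variant name is an assumption for the sake of the example):

// Illustrative mirror of the config enum used above; variant names other
// than `No` and `Deep` are assumptions.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum ValidationMode {
    No,
    Shallow,
    Deep,
}

fn parse_validation_flag(arg: &str, validation: &mut ValidationMode) -> bool {
    match arg {
        "-Zmiri-disable-validation" => { *validation = ValidationMode::No; true }
        "-Zmiri-recursive-validation" => { *validation = ValidationMode::Deep; true }
        _ => false,
    }
}

fn main() {
    let mut mode = ValidationMode::Shallow;
    assert!(parse_validation_flag("-Zmiri-recursive-validation", &mut mode));
    assert_eq!(mode, ValidationMode::Deep);
    println!("{mode:?}");
}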
