diff --git a/rust-version b/rust-version index dcee3ec5da..40e42e6886 100644 --- a/rust-version +++ b/rust-version @@ -1 +1 @@ -ee621f42329069c296b4c2066b3743cc4ff0f369 +efe2f32a6b8217425f361ec7c206910c611c03ee diff --git a/src/fn_call.rs b/src/fn_call.rs index 64dcce161d..d8794fed46 100644 --- a/src/fn_call.rs +++ b/src/fn_call.rs @@ -13,8 +13,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' fn find_fn( &mut self, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> { let this = self.eval_context_mut(); @@ -55,8 +55,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' fn emulate_foreign_item( &mut self, def_id: DefId, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); @@ -92,7 +92,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } else { let align = this.tcx.data_layout.pointer_align.abi; let ptr = this.memory_mut().allocate(Size::from_bytes(size), align, MiriMemoryKind::C.into()); - this.write_scalar(Scalar::Ptr(ptr.with_default_tag()), dest)?; + this.write_scalar(Scalar::Ptr(ptr), dest)?; } } "calloc" => { @@ -105,7 +105,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } else { let size = Size::from_bytes(bytes); let align = this.tcx.data_layout.pointer_align.abi; - let ptr = this.memory_mut().allocate(size, align, MiriMemoryKind::C.into()).with_default_tag(); + let ptr = this.memory_mut().allocate(size, align, MiriMemoryKind::C.into()); this.memory_mut().get_mut(ptr.alloc_id)?.write_repeat(tcx, ptr, 0, size)?; this.write_scalar(Scalar::Ptr(ptr), dest)?; } @@ -132,7 +132,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Align::from_bytes(align).unwrap(), MiriMemoryKind::C.into() ); - this.write_scalar(Scalar::Ptr(ptr.with_default_tag()), ret.into())?; + this.write_scalar(Scalar::Ptr(ptr), ret.into())?; } this.write_null(dest)?; } @@ -162,8 +162,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Size::from_bytes(size), Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into() - ) - .with_default_tag(); + ); this.write_scalar(Scalar::Ptr(ptr), dest)?; } "__rust_alloc_zeroed" => { @@ -180,8 +179,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Size::from_bytes(size), Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into() - ) - .with_default_tag(); + ); this.memory_mut() .get_mut(ptr.alloc_id)? 
.write_repeat(tcx, ptr, 0, Size::from_bytes(size))?; @@ -222,7 +220,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into(), )?; - this.write_scalar(Scalar::Ptr(new_ptr.with_default_tag()), dest)?; + this.write_scalar(Scalar::Ptr(new_ptr), dest)?; } "syscall" => { @@ -428,7 +426,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Size::from_bytes((value.len() + 1) as u64), Align::from_bytes(1).unwrap(), MiriMemoryKind::Env.into(), - ).with_default_tag(); + ); { let alloc = this.memory_mut().get_mut(value_copy.alloc_id)?; alloc.write_bytes(tcx, value_copy, &value)?; @@ -798,13 +796,13 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Ok(()) } - fn write_null(&mut self, dest: PlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> { + fn write_null(&mut self, dest: PlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { self.eval_context_mut().write_scalar(Scalar::from_int(0, dest.layout.size), dest) } /// Evaluates the scalar at the specified path. Returns Some(val) /// if the path could be resolved, and None otherwise - fn eval_path_scalar(&mut self, path: &[&str]) -> EvalResult<'tcx, Option>> { + fn eval_path_scalar(&mut self, path: &[&str]) -> EvalResult<'tcx, Option>> { let this = self.eval_context_mut(); if let Ok(instance) = this.resolve_path(path) { let cid = GlobalId { diff --git a/src/helpers.rs b/src/helpers.rs index 8a4cccc743..f468d25603 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -47,9 +47,9 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' /// will be true if this is frozen, false if this is in an `UnsafeCell`. fn visit_freeze_sensitive( &self, - place: MPlaceTy<'tcx, Borrow>, + place: MPlaceTy<'tcx, Tag>, size: Size, - mut action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, + mut action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, ) -> EvalResult<'tcx> { let this = self.eval_context_ref(); trace!("visit_frozen(place={:?}, size={:?})", *place, size); @@ -64,7 +64,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' let mut end_ptr = place.ptr; // Called when we detected an `UnsafeCell` at the given offset and size. // Calls `action` and advances `end_ptr`. - let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar, unsafe_cell_size: Size| { + let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar, unsafe_cell_size: Size| { if unsafe_cell_size != Size::ZERO { debug_assert_eq!(unsafe_cell_ptr.to_ptr().unwrap().alloc_id, end_ptr.to_ptr().unwrap().alloc_id); @@ -120,7 +120,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' /// Visiting the memory covered by a `MemPlace`, being aware of /// whether we are inside an `UnsafeCell` or not. 
struct UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> - where F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + where F: FnMut(MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { ecx: &'ecx MiriEvalContext<'a, 'mir, 'tcx>, unsafe_cell_action: F, @@ -131,9 +131,9 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' for UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> where - F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + F: FnMut(MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { - type V = MPlaceTy<'tcx, Borrow>; + type V = MPlaceTy<'tcx, Tag>; #[inline(always)] fn ecx(&self) -> &MiriEvalContext<'a, 'mir, 'tcx> { @@ -141,7 +141,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } // Hook to detect `UnsafeCell`. - fn visit_value(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_value(&mut self, v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty); let is_unsafe_cell = match v.layout.ty.sty { @@ -163,8 +163,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' // Make sure we visit aggregrates in increasing offset order. fn visit_aggregate( &mut self, - place: MPlaceTy<'tcx, Borrow>, - fields: impl Iterator>>, + place: MPlaceTy<'tcx, Tag>, + fields: impl Iterator>>, ) -> EvalResult<'tcx> { match place.layout.fields { layout::FieldPlacement::Array { .. } => { @@ -174,7 +174,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } layout::FieldPlacement::Arbitrary { .. } => { // Gather the subplaces and sort them before visiting. - let mut places = fields.collect::>>>()?; + let mut places = fields.collect::>>>()?; places.sort_by_key(|place| place.ptr.get_ptr_offset(self.ecx())); self.walk_aggregate(place, places.into_iter().map(Ok)) } @@ -186,7 +186,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } // We have to do *something* for unions. - fn visit_union(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_union(&mut self, v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { // With unions, we fall back to whatever the type says, to hopefully be consistent // with LLVM IR. @@ -200,7 +200,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } // We should never get to a primitive, but always short-circuit somewhere above. - fn visit_primitive(&mut self, _v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_primitive(&mut self, _v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { bug!("we should always short-circuit before coming to a primitive") } diff --git a/src/intrinsic.rs b/src/intrinsic.rs index bb156c95df..a17f576b43 100644 --- a/src/intrinsic.rs +++ b/src/intrinsic.rs @@ -4,7 +4,7 @@ use rustc::ty::layout::{self, LayoutOf, Size}; use rustc::ty; use crate::{ - PlaceTy, OpTy, ImmTy, Immediate, Scalar, ScalarMaybeUndef, Borrow, + PlaceTy, OpTy, ImmTy, Immediate, Scalar, ScalarMaybeUndef, Tag, OperatorEvalContextExt }; @@ -13,8 +13,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, fn call_intrinsic( &mut self, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: PlaceTy<'tcx, Borrow>, + args: &[OpTy<'tcx, Tag>], + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); if this.emulate_intrinsic(instance, args, dest)? 
{
diff --git a/src/lib.rs b/src/lib.rs
index 541986de55..3dbe922999 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -23,6 +23,7 @@ mod stacked_borrows;
 
 use std::collections::HashMap;
 use std::borrow::Cow;
+use std::rc::Rc;
 
 use rand::rngs::StdRng;
 use rand::SeedableRng;
@@ -48,7 +49,7 @@ use crate::mono_hash_map::MonoHashMap;
 pub use crate::stacked_borrows::{EvalContextExt as StackedBorEvalContextExt};
 
 // Used by priroda.
-pub use crate::stacked_borrows::{Borrow, Stack, Stacks, BorStackItem};
+pub use crate::stacked_borrows::{Tag, Permission, Stack, Stacks, Item};
 
 /// Insert rustc arguments at the beginning of the argument list that Miri wants to be
 /// set per default, for maximal validation power.
@@ -155,7 +156,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>(
             // Don't forget `0` terminator.
             cmd.push(std::char::from_u32(0).unwrap());
             // Collect the pointers to the individual strings.
-            let mut argvs = Vec::<Pointer<Borrow>>::new();
+            let mut argvs = Vec::<Pointer<Tag>>::new();
             for arg in config.args {
                 // Add `0` terminator.
                 let mut arg = arg.into_bytes();
@@ -187,7 +188,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>(
                 Size::from_bytes(cmd_utf16.len() as u64 * 2),
                 Align::from_bytes(2).unwrap(),
                 MiriMemoryKind::Env.into(),
-            ).with_default_tag();
+            );
             ecx.machine.cmd_line = Some(cmd_ptr);
             // Store the UTF-16 string.
             let char_size = Size::from_bytes(2);
@@ -214,7 +215,13 @@ pub fn eval_main<'a, 'tcx: 'a>(
     main_id: DefId,
     config: MiriConfig,
 ) {
-    let mut ecx = create_ecx(tcx, main_id, config).expect("couldn't create ecx");
+    let mut ecx = match create_ecx(tcx, main_id, config) {
+        Ok(ecx) => ecx,
+        Err(mut err) => {
+            err.print_backtrace();
+            panic!("Miri initialization error: {}", err.kind)
+        }
+    };
 
     // Perform the main execution.
     let res: EvalResult = (|| {
@@ -310,14 +317,14 @@ impl MayLeak for MiriMemoryKind {
 pub struct Evaluator<'tcx> {
     /// Environment variables set by `setenv`.
     /// Miri does not expose env vars from the host to the emulated program.
-    pub(crate) env_vars: HashMap<Vec<u8>, Pointer<Borrow>>,
+    pub(crate) env_vars: HashMap<Vec<u8>, Pointer<Tag>>,
 
     /// Program arguments (`Option` because we can only initialize them after creating the ecx).
     /// These are *pointers* to argc/argv because macOS.
     /// We also need the full command line as one string because of Windows.
-    pub(crate) argc: Option<Pointer<Borrow>>,
-    pub(crate) argv: Option<Pointer<Borrow>>,
-    pub(crate) cmd_line: Option<Pointer<Borrow>>,
+    pub(crate) argc: Option<Pointer<Tag>>,
+    pub(crate) argv: Option<Pointer<Tag>>,
+    pub(crate) cmd_line: Option<Pointer<Tag>>,
 
     /// Last OS error.
     pub(crate) last_error: u32,
@@ -328,9 +335,6 @@ pub struct Evaluator<'tcx> {
     /// Whether to enforce the validity invariant.
     pub(crate) validate: bool,
 
-    /// Stacked Borrows state.
- pub(crate) stacked_borrows: stacked_borrows::State, - /// The random number generator to use if Miri /// is running in non-deterministic mode pub(crate) rng: Option @@ -346,7 +350,6 @@ impl<'tcx> Evaluator<'tcx> { last_error: 0, tls: TlsData::default(), validate, - stacked_borrows: stacked_borrows::State::default(), rng: seed.map(|s| StdRng::seed_from_u64(s)) } } @@ -378,9 +381,9 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { type FrameExtra = stacked_borrows::CallId; type MemoryExtra = stacked_borrows::MemoryState; type AllocExtra = stacked_borrows::Stacks; - type PointerTag = Borrow; + type PointerTag = Tag; - type MemoryMap = MonoHashMap, Allocation)>; + type MemoryMap = MonoHashMap, Allocation)>; const STATIC_KIND: Option = Some(MiriMemoryKind::MutStatic); @@ -394,8 +397,8 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn find_fn( ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> { ecx.find_fn(instance, args, dest, ret) @@ -405,8 +408,8 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn call_intrinsic( ecx: &mut rustc_mir::interpret::InterpretCx<'a, 'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: PlaceTy<'tcx, Borrow>, + args: &[OpTy<'tcx, Tag>], + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { ecx.call_intrinsic(instance, args, dest) } @@ -415,15 +418,15 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn ptr_op( ecx: &rustc_mir::interpret::InterpretCx<'a, 'mir, 'tcx, Self>, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)> { + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)> { ecx.ptr_op(bin_op, left, right) } fn box_alloc( ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, - dest: PlaceTy<'tcx, Borrow>, + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { trace!("box_alloc for {:?}", dest.layout.ty); // Call the `exchange_malloc` lang item. @@ -467,7 +470,7 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { def_id: DefId, tcx: TyCtxtAt<'a, 'tcx, 'tcx>, memory_extra: &Self::MemoryExtra, - ) -> EvalResult<'tcx, Cow<'tcx, Allocation>> { + ) -> EvalResult<'tcx, Cow<'tcx, Allocation>> { let attrs = tcx.get_attrs(def_id); let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") { Some(name) => name.as_str(), @@ -479,7 +482,7 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { // This should be all-zero, pointer-sized. 
let size = tcx.data_layout.pointer_size; let data = vec![0; size.bytes() as usize]; - let extra = AllocationExtra::memory_allocated(size, memory_extra); + let extra = Stacks::new(size, Tag::default(), Rc::clone(memory_extra)); Allocation::from_bytes(&data, tcx.data_layout.pointer_align.abi, extra) } _ => return err!(Unimplemented( @@ -499,16 +502,17 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn adjust_static_allocation<'b>( alloc: &'b Allocation, memory_extra: &Self::MemoryExtra, - ) -> Cow<'b, Allocation> { - let extra = AllocationExtra::memory_allocated( + ) -> Cow<'b, Allocation> { + let extra = Stacks::new( Size::from_bytes(alloc.bytes.len() as u64), - memory_extra, + Tag::default(), + Rc::clone(memory_extra), ); - let alloc: Allocation = Allocation { + let alloc: Allocation = Allocation { bytes: alloc.bytes.clone(), relocations: Relocations::from_presorted( alloc.relocations.iter() - .map(|&(offset, ((), alloc))| (offset, (Borrow::default(), alloc))) + .map(|&(offset, ((), alloc))| (offset, (Tag::default(), alloc))) .collect() ), undef_mask: alloc.undef_mask.clone(), @@ -519,46 +523,30 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { Cow::Owned(alloc) } - fn tag_dereference( - ecx: &InterpretCx<'a, 'mir, 'tcx, Self>, - place: MPlaceTy<'tcx, Borrow>, - mutability: Option, - ) -> EvalResult<'tcx, Scalar> { - let size = ecx.size_and_align_of_mplace(place)?.map(|(size, _)| size) - // For extern types, just cover what we can. - .unwrap_or_else(|| place.layout.size); - if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || - !Self::enforce_validity(ecx) || size == Size::ZERO - { - // No tracking. - Ok(place.ptr) - } else { - ecx.ptr_dereference(place, size, mutability.into())?; - // We never change the pointer. - Ok(place.ptr) - } + #[inline(always)] + fn new_allocation( + size: Size, + extra: &Self::MemoryExtra, + kind: MemoryKind, + ) -> (Self::AllocExtra, Self::PointerTag) { + Stacks::new_allocation(size, extra, kind) } #[inline(always)] - fn tag_new_allocation( - ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, - ptr: Pointer, - kind: MemoryKind, - ) -> Pointer { - if !ecx.machine.validate { - // No tracking. - ptr.with_default_tag() - } else { - let tag = ecx.tag_new_allocation(ptr.alloc_id, kind); - Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag) - } + fn tag_dereference( + _ecx: &InterpretCx<'a, 'mir, 'tcx, Self>, + place: MPlaceTy<'tcx, Tag>, + _mutability: Option, + ) -> EvalResult<'tcx, Scalar> { + // Nothing happens. + Ok(place.ptr) } #[inline(always)] fn retag( ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, kind: mir::RetagKind, - place: PlaceTy<'tcx, Borrow>, + place: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || !Self::enforce_validity(ecx) { // No tracking, or no retagging. 
The latter is possible because a dependency of ours diff --git a/src/operator.rs b/src/operator.rs index 45c0e63542..386fc4307b 100644 --- a/src/operator.rs +++ b/src/operator.rs @@ -7,39 +7,39 @@ pub trait EvalContextExt<'tcx> { fn ptr_op( &self, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)>; + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)>; fn ptr_int_arithmetic( &self, bin_op: mir::BinOp, - left: Pointer, + left: Pointer, right: u128, signed: bool, - ) -> EvalResult<'tcx, (Scalar, bool)>; + ) -> EvalResult<'tcx, (Scalar, bool)>; fn ptr_eq( &self, - left: Scalar, - right: Scalar, + left: Scalar, + right: Scalar, ) -> EvalResult<'tcx, bool>; fn pointer_offset_inbounds( &self, - ptr: Scalar, + ptr: Scalar, pointee_ty: Ty<'tcx>, offset: i64, - ) -> EvalResult<'tcx, Scalar>; + ) -> EvalResult<'tcx, Scalar>; } impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> { fn ptr_op( &self, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)> { + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)> { use rustc::mir::BinOp::*; trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right); @@ -136,8 +136,8 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' fn ptr_eq( &self, - left: Scalar, - right: Scalar, + left: Scalar, + right: Scalar, ) -> EvalResult<'tcx, bool> { let size = self.pointer_size(); Ok(match (left, right) { @@ -233,13 +233,13 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' fn ptr_int_arithmetic( &self, bin_op: mir::BinOp, - left: Pointer, + left: Pointer, right: u128, signed: bool, - ) -> EvalResult<'tcx, (Scalar, bool)> { + ) -> EvalResult<'tcx, (Scalar, bool)> { use rustc::mir::BinOp::*; - fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool) { + fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool) { (Scalar::Ptr(res), over) } @@ -327,10 +327,10 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' /// allocation, and all the remaining integers pointers their own allocation. fn pointer_offset_inbounds( &self, - ptr: Scalar, + ptr: Scalar, pointee_ty: Ty<'tcx>, offset: i64, - ) -> EvalResult<'tcx, Scalar> { + ) -> EvalResult<'tcx, Scalar> { // FIXME: assuming here that type size is less than `i64::max_value()`. 
     let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
     let offset = offset
diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs
index bea6aaf9cf..250def0c7c 100644
--- a/src/stacked_borrows.rs
+++ b/src/stacked_borrows.rs
@@ -1,127 +1,164 @@
 use std::cell::RefCell;
 use std::collections::HashSet;
 use std::rc::Rc;
+use std::fmt;
+use std::num::NonZeroU64;
 
 use rustc::ty::{self, layout::Size};
-use rustc::hir::{Mutability, MutMutable, MutImmutable};
+use rustc::hir::{MutMutable, MutImmutable};
 use rustc::mir::RetagKind;
 
 use crate::{
     EvalResult, InterpError, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor,
-    MemoryKind, MiriMemoryKind, RangeMap, AllocId, Allocation, AllocationExtra,
+    MemoryKind, MiriMemoryKind, RangeMap, Allocation, AllocationExtra,
     Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy,
 };
 
-pub type Timestamp = u64;
-pub type CallId = u64;
+pub type PtrId = NonZeroU64;
+pub type CallId = NonZeroU64;
 
-/// Information about which kind of borrow was used to create the reference this is tagged with.
+/// Tracking pointer provenance
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub enum Borrow {
-    /// A unique (mutable) reference.
-    Uniq(Timestamp),
-    /// An aliasing reference. This is also used by raw pointers, which do not track details
-    /// of how or when they were created, hence the timestamp is optional.
-    /// `Shr(Some(_))` does *not* mean that the destination of this reference is frozen;
-    /// that depends on the type! Only those parts outside of an `UnsafeCell` are actually
-    /// frozen.
-    Alias(Option<Timestamp>),
+pub enum Tag {
+    Tagged(PtrId),
+    Untagged,
 }
 
-impl Borrow {
-    #[inline(always)]
-    pub fn is_aliasing(self) -> bool {
+impl fmt::Display for Tag {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            Borrow::Alias(_) => true,
-            _ => false,
-        }
-    }
-
-    #[inline(always)]
-    pub fn is_unique(self) -> bool {
-        match self {
-            Borrow::Uniq(_) => true,
-            _ => false,
+            Tag::Tagged(id) => write!(f, "<{}>", id),
+            Tag::Untagged => write!(f, "<untagged>"),
         }
     }
 }
 
-impl Default for Borrow {
-    fn default() -> Self {
-        Borrow::Alias(None)
-    }
+/// Indicates which permission is granted (by this item to some pointers)
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub enum Permission {
+    /// Grants unique mutable access.
+    Unique,
+    /// Grants shared mutable access.
+    SharedReadWrite,
+    /// Grants shared read-only access.
+    SharedReadOnly,
 }
 
 /// An item in the per-location borrow stack.
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub enum BorStackItem {
-    /// Indicates the unique reference that may mutate.
-    Uniq(Timestamp),
-    /// Indicates that the location has been mutably shared. Used for raw pointers as
-    /// well as for unfrozen shared references.
-    Raw,
-    /// A barrier, tracking the function it belongs to by its index on the call stack.
-    FnBarrier(CallId)
+pub struct Item {
+    /// The permission this item grants.
+    perm: Permission,
+    /// The pointers the permission is granted to.
+    tag: Tag,
+    /// An optional protector, ensuring the item cannot get popped until `CallId` is over.
+    protector: Option<CallId>,
+}
+
+impl fmt::Display for Item {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "[{:?} for {}", self.perm, self.tag)?;
+        if let Some(call) = self.protector {
+            write!(f, " (call {})", call)?;
+        }
+        write!(f, "]")?;
+        Ok(())
+    }
 }
 
 /// Extra per-location state.
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct Stack {
-    /// Used as the stack; never empty.
-    borrows: Vec<BorStackItem>,
-    /// A virtual frozen "item" on top of the stack.
-    frozen_since: Option<Timestamp>,
+    /// Used *mostly* as a stack; never empty.
+    /// We sometimes push into the middle but never remove from the middle.
+    /// The same tag may occur multiple times, e.g. from a two-phase borrow.
+    /// Invariants:
+    /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
+    borrows: Vec<Item>,
 }
 
-impl Stack {
-    #[inline(always)]
-    pub fn is_frozen(&self) -> bool {
-        self.frozen_since.is_some()
-    }
+
+/// Extra per-allocation state.
+#[derive(Clone, Debug)]
+pub struct Stacks {
+    // Even reading memory can have effects on the stack, so we need a `RefCell` here.
+    stacks: RefCell<RangeMap<Stack>>,
+    // Pointer to global state
+    global: MemoryState,
 }
 
-/// Indicates which kind of reference is being used.
-#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub enum RefKind {
-    /// `&mut`.
-    Unique,
-    /// `&` without interior mutability.
-    Frozen,
-    /// `*` (raw pointer) or `&` to `UnsafeCell`.
-    Raw,
+/// Extra global state, available to the memory access hooks.
+#[derive(Debug)]
+pub struct GlobalState {
+    next_ptr_id: PtrId,
+    next_call_id: CallId,
+    active_calls: HashSet<CallId>,
 }
+pub type MemoryState = Rc<RefCell<GlobalState>>;
 
 /// Indicates which kind of access is being performed.
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
 pub enum AccessKind {
     Read,
     Write,
-    Dealloc,
 }
 
-/// Extra global state in the memory, available to the memory access hooks.
-#[derive(Debug)]
-pub struct BarrierTracking {
-    next_id: CallId,
-    active_calls: HashSet<CallId>,
+impl fmt::Display for AccessKind {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            AccessKind::Read => write!(f, "read"),
+            AccessKind::Write => write!(f, "write"),
+        }
+    }
+}
+
+/// Indicates which kind of reference is being created.
+/// Used by high-level `reborrow` to compute which permissions to grant to the
+/// new pointer.
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub enum RefKind {
+    /// `&mut` and `Box`.
+    Unique,
+    /// `&` with or without interior mutability.
+    Shared,
+    /// `*mut`/`*const` (raw pointers).
+    Raw { mutable: bool },
+}
+
+impl fmt::Display for RefKind {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            RefKind::Unique => write!(f, "unique"),
+            RefKind::Shared => write!(f, "shared"),
+            RefKind::Raw { mutable: true } => write!(f, "raw (mutable)"),
+            RefKind::Raw { mutable: false } => write!(f, "raw (constant)"),
+        }
+    }
 }
-pub type MemoryState = Rc<RefCell<BarrierTracking>>;
 
-impl Default for BarrierTracking {
+/// Utilities for initialization and ID generation
+impl Default for GlobalState {
     fn default() -> Self {
-        BarrierTracking {
-            next_id: 0,
+        GlobalState {
+            next_ptr_id: NonZeroU64::new(1).unwrap(),
+            next_call_id: NonZeroU64::new(1).unwrap(),
             active_calls: HashSet::default(),
         }
     }
 }
 
-impl BarrierTracking {
+impl GlobalState {
+    pub fn new_ptr(&mut self) -> PtrId {
+        let id = self.next_ptr_id;
+        self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap();
+        id
+    }
+
     pub fn new_call(&mut self) -> CallId {
-        let id = self.next_id;
+        let id = self.next_call_id;
         trace!("new_call: Assigning ID {}", id);
         self.active_calls.insert(id);
-        self.next_id += 1;
+        self.next_call_id = NonZeroU64::new(id.get() + 1).unwrap();
         id
     }
 
@@ -134,441 +171,442 @@ impl BarrierTracking {
     }
 }
 
-/// Extra global machine state.
-#[derive(Clone, Debug)]
-pub struct State {
-    clock: Timestamp
-}
+// # Stacked Borrows Core Begin
 
-impl Default for State {
-    fn default() -> Self {
-        State { clock: 0 }
+/// We need to make at least the following things true:
+///
+/// U1: After creating a `Uniq`, it is at the top.
+/// U2: If the top is `Uniq`, accesses must be through that `Uniq` or remove it.
+/// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack.
+///
+/// F1: After creating a `&`, the parts outside `UnsafeCell` have our `SharedReadOnly` on top.
+/// F2: If a write access happens, it pops the `SharedReadOnly`. This has two pieces:
+///     F2a: If a write happens granted by an item below our `SharedReadOnly`, the `SharedReadOnly`
+///          gets popped.
+///     F2b: No `SharedReadWrite` or `Unique` will ever be added on top of our `SharedReadOnly`.
+/// F3: If an access happens with an `&` outside `UnsafeCell`,
+///     it requires the `SharedReadOnly` to still be in the stack.
+
+impl Default for Tag {
+    #[inline(always)]
+    fn default() -> Tag {
+        Tag::Untagged
     }
 }
 
-impl State {
-    fn increment_clock(&mut self) -> Timestamp {
-        let val = self.clock;
-        self.clock = val + 1;
-        val
+/// Core relations on `Permission` define which accesses are allowed:
+/// On every access, we try to find a *granting* item, and then we remove all
+/// *incompatible* items above it.
+impl Permission {
+    /// This defines, for a given permission, whether it permits the given kind of access.
+    fn grants(self, access: AccessKind) -> bool {
+        match (self, access) {
+            // Unique and SharedReadWrite allow any kind of access.
+            (Permission::Unique, _) |
+            (Permission::SharedReadWrite, _) =>
+                true,
+            // SharedReadOnly only permits read access.
+            (Permission::SharedReadOnly, AccessKind::Read) =>
+                true,
+            (Permission::SharedReadOnly, AccessKind::Write) =>
+                false,
+        }
     }
-}
 
-/// Extra per-allocation state.
-#[derive(Clone, Debug)]
-pub struct Stacks {
-    // Even reading memory can have effects on the stack, so we need a `RefCell` here.
-    stacks: RefCell<RangeMap<Stack>>,
-    barrier_tracking: MemoryState,
+    /// This defines, for a given permission, which other permissions it can tolerate "above" itself
+    /// for which kinds of accesses.
+    /// If true, then `other` is allowed to remain on top of `self` when `access` happens.
+    fn compatible_with(self, access: AccessKind, other: Permission) -> bool {
+        use self::Permission::*;
+
+        match (self, access, other) {
+            // Some cases are impossible.
+            (SharedReadOnly, _, SharedReadWrite) |
+            (SharedReadOnly, _, Unique) =>
+                bug!("There can never be a SharedReadWrite or a Unique on top of a SharedReadOnly"),
+            // When `other` is `SharedReadOnly`, that is NEVER compatible with
+            // write accesses.
+            // This makes sure read-only pointers become invalid on write accesses (ensures F2a).
+            (_, AccessKind::Write, SharedReadOnly) =>
+                false,
+            // When `other` is `Unique`, that is compatible with nothing.
+            // This makes sure unique pointers become invalid on incompatible accesses (ensures U2).
+            (_, _, Unique) =>
+                false,
+            // When we are unique and this is a write/dealloc, we tolerate nothing.
+            // This makes sure we re-assert uniqueness ("being on top") on write accesses.
+            // (This is particularly important such that when a new mutable ref gets created, it gets
+            // pushed onto the right item -- this behaves like a write and we assert uniqueness of the
+            // pointer from which this comes, *if* it was a unique pointer.)
+ (Unique, AccessKind::Write, _) => + false, + // `SharedReadWrite` items can tolerate any other akin items for any kind of access. + (SharedReadWrite, _, SharedReadWrite) => + true, + // Any item can tolerate read accesses for shared items. + // This includes unique items! Reads from unique pointers do not invalidate + // other pointers. + (_, AccessKind::Read, SharedReadWrite) | + (_, AccessKind::Read, SharedReadOnly) => + true, + // That's it. + } + } } -/// Core per-location operations: deref, access, create. -/// We need to make at least the following things true: -/// -/// U1: After creating a `Uniq`, it is at the top (and unfrozen). -/// U2: If the top is `Uniq` (and unfrozen), accesses must be through that `Uniq` or pop it. -/// U3: If an access (deref sufficient?) happens with a `Uniq`, it requires the `Uniq` to be in the stack. -/// -/// F1: After creating a `&`, the parts outside `UnsafeCell` are frozen. -/// F2: If a write access happens, it unfreezes. -/// F3: If an access (well, a deref) happens with an `&` outside `UnsafeCell`, -/// it requires the location to still be frozen. +/// Core per-location operations: access, dealloc, reborrow. impl<'tcx> Stack { - /// Deref `bor`: check if the location is frozen and the tag in the stack. - /// This dos *not* constitute an access! "Deref" refers to the `*` operator - /// in Rust, and includs cases like `&*x` or `(*x).foo` where no or only part - /// of the memory actually gets accessed. Also we cannot know if we are - /// going to read or write. - /// Returns the index of the item we matched, `None` if it was the frozen one. - /// `kind` indicates which kind of reference is being dereferenced. - fn deref( - &self, - bor: Borrow, - kind: RefKind, - ) -> Result, String> { - // Exclude unique ref with frozen tag. - if let (RefKind::Unique, Borrow::Alias(Some(_))) = (kind, bor) { - return Err(format!("encountered mutable reference with frozen tag ({:?})", bor)); - } - // Checks related to freezing. - match bor { - Borrow::Alias(Some(bor_t)) if kind == RefKind::Frozen => { - // We need the location to be frozen. This ensures F3. - let frozen = self.frozen_since.map_or(false, |itm_t| itm_t <= bor_t); - return if frozen { Ok(None) } else { - Err(format!("location is not frozen long enough")) + /// Find the item granting the given kind of access to the given tag, and where that item is in the stack. + fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<(usize, Permission)> { + self.borrows.iter() + .enumerate() // we also need to know *where* in the stack + .rev() // search top-to-bottom + // Return permission of first item that grants access. + // We require a permission with the right tag, ensuring U3 and F3. + .find_map(|(idx, item)| + if item.perm.grants(access) && tag == item.tag { + Some((idx, item.perm)) + } else { + None } - } - Borrow::Alias(_) if self.frozen_since.is_some() => { - // Shared deref to frozen location; looking good. - return Ok(None) - } - // Not sufficient; go on looking. - _ => {} - } - // If we got here, we have to look for our item in the stack. - for (idx, &itm) in self.borrows.iter().enumerate().rev() { - match (itm, bor) { - (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. This satisfies U3. - return Ok(Some(idx)) - } - (BorStackItem::Raw, Borrow::Alias(_)) => { - // Found matching aliasing/raw item. - return Ok(Some(idx)) + ) + } + + /// Test if a memory `access` using pointer tagged `tag` is granted. 
+ /// If yes, return the index of the item that granted it. + fn access( + &mut self, + access: AccessKind, + tag: Tag, + global: &GlobalState, + ) -> EvalResult<'tcx, usize> { + // Two main steps: Find granting item, remove all incompatible items above. + + // Step 1: Find granting item. + let (granting_idx, granting_perm) = self.find_granting(access, tag) + .ok_or_else(|| InterpError::MachineError(format!( + "no item granting {} access to tag {} found in borrow stack", + access, tag, + )))?; + + // Step 2: Remove everything incompatible above them. Make sure we do not remove protected + // items. + // We do *not* maintain a stack discipline here. We could, in principle, decide to only + // keep the items immediately above `granting_idx` that are compatible, and then pop the rest. + // However, that kills off entire "branches" of pointer derivation too easily: + // in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement would pop the `Unique` + // from the reborrow of the first statement, and subsequently also pop the `SharedReadWrite` for `raw`. + // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared + // reference and use that. + { + // Implemented with indices because there does not seem to be a nice iterator and range-based + // API for this. + let mut cur = granting_idx + 1; + while let Some(item) = self.borrows.get(cur) { + if granting_perm.compatible_with(access, item.perm) { + // Keep this, check next. + cur += 1; + } else { + // Aha! This is a bad one, remove it, and make sure it is not protected. + let item = self.borrows.remove(cur); + if let Some(call) = item.protector { + if global.is_active(call) { + return err!(MachineError(format!( + "not granting {} access to tag {} because incompatible item {} is protected", + access, tag, item + ))); + } + } + trace!("access: removing item {}", item); } - // Go on looking. We ignore barriers! When an `&mut` and an `&` alias, - // dereferencing the `&` is still possible (to reborrow), but doing - // an access is not. - _ => {} } } - // If we got here, we did not find our item. We have to error to satisfy U3. - Err(format!("Borrow being dereferenced ({:?}) does not exist on the borrow stack", bor)) + + // Done. + return Ok(granting_idx); } - /// Performs an actual memory access using `bor`. We do not know any types here - /// or whether things should be frozen, but we *do* know if this is reading - /// or writing. - fn access( + /// Deallocate a location: Like a write access, but also there must be no + /// active protectors at all. + fn dealloc( &mut self, - bor: Borrow, - kind: AccessKind, - barrier_tracking: &BarrierTracking, + tag: Tag, + global: &GlobalState, ) -> EvalResult<'tcx> { - // Check if we can match the frozen "item". - // Not possible on writes! - if self.is_frozen() { - if kind == AccessKind::Read { - // When we are frozen, we just accept all reads. No harm in this. - // The deref already checked that `Uniq` items are in the stack, and that - // the location is frozen if it should be. - return Ok(()); - } - trace!("access: unfreezing"); - } - // Unfreeze on writes. This ensures F2. - self.frozen_since = None; - // Pop the stack until we have something matching. - while let Some(&itm) = self.borrows.last() { - match (itm, bor) { - (BorStackItem::FnBarrier(call), _) if barrier_tracking.is_active(call) => { + // Step 1: Find granting item. 
+ self.find_granting(AccessKind::Write, tag) + .ok_or_else(|| InterpError::MachineError(format!( + "no item granting write access for deallocation to tag {} found in borrow stack", + tag, + )))?; + + // We must make sure there are no protected items remaining on the stack. + // Also clear the stack, no more accesses are possible. + for item in self.borrows.drain(..) { + if let Some(call) = item.protector { + if global.is_active(call) { return err!(MachineError(format!( - "stopping looking for borrow being accessed ({:?}) because of barrier ({})", - bor, call + "deallocating with active protector ({})", call ))) } - (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. Continue after the match. - } - (BorStackItem::Raw, _) if kind == AccessKind::Read => { - // When reading, everything can use a raw item! - // We do not want to do this when writing: Writing to an `&mut` - // should reaffirm its exclusivity (i.e., make sure it is - // on top of the stack). Continue after the match. - } - (BorStackItem::Raw, Borrow::Alias(_)) => { - // Found matching raw item. Continue after the match. - } - _ => { - // Pop this, go on. This ensures U2. - let itm = self.borrows.pop().unwrap(); - trace!("access: Popping {:?}", itm); - continue - } - } - // If we got here, we found a matching item. Congratulations! - // However, we are not done yet: If this access is deallocating, we must make sure - // there are no active barriers remaining on the stack. - if kind == AccessKind::Dealloc { - for &itm in self.borrows.iter().rev() { - match itm { - BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => { - return err!(MachineError(format!( - "deallocating with active barrier ({})", call - ))) - } - _ => {}, - } - } } - // Now we are done. - return Ok(()) } - // If we got here, we did not find our item. - err!(MachineError(format!( - "borrow being accessed ({:?}) does not exist on the borrow stack", - bor - ))) + + Ok(()) } - /// Initiate `bor`; mostly this means pushing. - /// This operation cannot fail; it is up to the caller to ensure that the precondition - /// is met: We cannot push `Uniq` onto frozen stacks. - /// `kind` indicates which kind of reference is being created. - fn create(&mut self, bor: Borrow, kind: RefKind) { - // When creating a frozen reference, freeze. This ensures F1. - // We also do *not* push anything else to the stack, making sure that no nother kind - // of access (like writing through raw pointers) is permitted. - if kind == RefKind::Frozen { - let bor_t = match bor { - Borrow::Alias(Some(t)) => t, - _ => bug!("Creating illegal borrow {:?} for frozen ref", bor), - }; - // It is possible that we already are frozen (e.g., if we just pushed a barrier, - // the redundancy check would not have kicked in). - match self.frozen_since { - Some(loc_t) => assert!( - loc_t <= bor_t, - "trying to freeze location for longer than it was already frozen" - ), - None => { - trace!("create: Freezing"); - self.frozen_since = Some(bor_t); + /// `reborrow` helper function: test that the stack invariants are still maintained. + fn test_invariants(&self) { + let mut saw_shared_read_only = false; + for item in self.borrows.iter() { + match item.perm { + Permission::SharedReadOnly => { + saw_shared_read_only = true; + } + // Otherwise, if we saw one before, that's a bug. 
+                perm if saw_shared_read_only => {
+                    bug!("Found {:?} on top of a SharedReadOnly!", perm);
+                }
+                _ => {}
+            }
+        }
+    }
+
+    /// Derive a new pointer from one with the given tag.
+    /// `weak` controls whether this is a weak reborrow: weak reborrows do not act as
+    /// accesses, and they add the new item directly on top of the one it is derived
+    /// from instead of all the way at the top of the stack.
+    fn reborrow(
+        &mut self,
+        derived_from: Tag,
+        weak: bool,
+        new: Item,
+        global: &GlobalState,
+    ) -> EvalResult<'tcx> {
+        // Figure out which access `perm` corresponds to.
+        let access = if new.perm.grants(AccessKind::Write) {
+            AccessKind::Write
+        } else {
+            AccessKind::Read
+        };
+        // Now we figure out which item grants our parent (`derived_from`) this kind of access.
+        // We use that to determine where to put the new item.
+        let (derived_from_idx, _) = self.find_granting(access, derived_from)
+            .ok_or_else(|| InterpError::MachineError(format!(
+                "no item to reborrow for {:?} from tag {} found in borrow stack", new.perm, derived_from,
+            )))?;
+
+        // Compute where to put the new item.
+        // Either way, we ensure that we insert the new item in a way that between
+        // `derived_from` and the new one, there are only items *compatible with* `derived_from`.
+        let new_idx = if weak {
+            // A very liberal reborrow because the new pointer does not expect any kind of aliasing guarantee.
+            // Just insert new permission as child of old permission, and maintain everything else.
+            // This inserts "as far down as possible", which is good because it makes this pointer as
+            // long-lived as possible *and* we want all the items that are incompatible with this
+            // to actually get removed from the stack. If we pushed a `SharedReadWrite` on top of
+            // a `SharedReadOnly`, we'd violate the invariant that `SharedReadOnly` are at the top
+            // and we'd allow write access without invalidating frozen shared references!
+            // This ensures F2b for `SharedReadWrite` by adding the new item below any
+            // potentially existing `SharedReadOnly`.
+            derived_from_idx + 1
+        } else {
+            // A "safe" reborrow for a pointer that actually expects some aliasing guarantees.
+            // Here, creating a reference actually counts as an access, and pops incompatible
+            // stuff off the stack.
+            // This ensures F2b for `Unique`, by removing offending `SharedReadOnly`.
+            let check_idx = self.access(access, derived_from, global)?;
+            assert_eq!(check_idx, derived_from_idx, "somehow we saw different items??");
+
+            // We insert "as far up as possible": We know only compatible items are remaining
+            // on top of `derived_from`, and we want the new item at the top so that we
+            // get the strongest possible guarantees.
+            // This ensures U1 and F1.
+            self.borrows.len()
+        };
+
-    /// Adds a barrier.
- fn barrier(&mut self, call: CallId) { - let itm = BorStackItem::FnBarrier(call); - if *self.borrows.last().unwrap() == itm { - // This is just an optimization, no functional change: Avoid stacking - // multiple identical barriers on top of each other. - // This can happen when a function receives several shared references - // that overlap. - trace!("barrier: avoiding redundant extra barrier"); + // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors. + if self.borrows[new_idx-1] == new || self.borrows.get(new_idx) == Some(&new) { + // Optimization applies, done. + trace!("reborrow: avoiding adding redundant item {}", new); } else { - trace!("barrier: pushing barrier for call {}", call); - self.borrows.push(itm); + trace!("reborrow: adding item {}", new); + self.borrows.insert(new_idx, new); + } + + // Make sure that after all this, the stack's invariant is still maintained. + if cfg!(debug_assertions) { + self.test_invariants(); } + + Ok(()) } } +// # Stacked Borrows Core End -/// Higher-level per-location operations: deref, access, reborrow. +/// Map per-stack operations to higher-level per-location-range operations. impl<'tcx> Stacks { - /// Checks that this stack is fine with being dereferenced. - fn deref( - &self, - ptr: Pointer, + /// Creates new stack with initial tag. + pub(crate) fn new( size: Size, - kind: RefKind, - ) -> EvalResult<'tcx> { - trace!("deref for tag {:?} as {:?}: {:?}, size {}", - ptr.tag, kind, ptr, size.bytes()); - let stacks = self.stacks.borrow(); - for stack in stacks.iter(ptr.offset, size) { - stack.deref(ptr.tag, kind).map_err(InterpError::MachineError)?; + tag: Tag, + extra: MemoryState, + ) -> Self { + let item = Item { perm: Permission::Unique, tag, protector: None }; + let stack = Stack { + borrows: vec![item], + }; + Stacks { + stacks: RefCell::new(RangeMap::new(size, stack)), + global: extra, } - Ok(()) } - /// `ptr` got used, reflect that in the stack. - fn access( + /// Call `f` on every stack in the range. + fn for_each( &self, - ptr: Pointer, + ptr: Pointer, size: Size, - kind: AccessKind, + f: impl Fn(&mut Stack, &GlobalState) -> EvalResult<'tcx>, ) -> EvalResult<'tcx> { - trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes()); - // Even reads can have a side-effect, by invalidating other references. - // This is fundamentally necessary since `&mut` asserts that there - // are no accesses through other references, not even reads. - let barrier_tracking = self.barrier_tracking.borrow(); + let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.access(ptr.tag, kind, &*barrier_tracking)?; + f(stack, &*global)?; } Ok(()) } +} - /// Reborrow the given pointer to the new tag for the given kind of reference. - /// This works on `&self` because we might encounter references to constant memory. - fn reborrow( - &self, - ptr: Pointer, +/// Glue code to connect with Miri Machine Hooks +impl Stacks { + pub fn new_allocation( size: Size, - mut barrier: Option, - new_bor: Borrow, - new_kind: RefKind, - ) -> EvalResult<'tcx> { - assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique); - trace!( - "reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}", - ptr.tag, new_bor, new_kind, ptr, size.bytes(), - ); - if new_kind == RefKind::Raw { - // No barrier for raw, including `&UnsafeCell`. They can rightfully alias with `&mut`. 
- // FIXME: This means that the `dereferencable` attribute on non-frozen shared references - // is incorrect! They are dereferencable when the function is called, but might become - // non-dereferencable during the course of execution. - // Also see [1], [2]. - // - // [1]: , - // [2]: - barrier = None; - } - let barrier_tracking = self.barrier_tracking.borrow(); - let mut stacks = self.stacks.borrow_mut(); - for stack in stacks.iter_mut(ptr.offset, size) { - // Access source `ptr`, create new ref. - let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(InterpError::MachineError)?; - // If we can deref the new tag already, and if that tag lives higher on - // the stack than the one we come from, just use that. - // That is, we check if `new_bor` *already* is "derived from" `ptr.tag`. - // This also checks frozenness, if required. - let bor_redundant = barrier.is_none() && - match (ptr_idx, stack.deref(new_bor, new_kind)) { - // If the new borrow works with the frozen item, or else if it lives - // above the old one in the stack, our job here is done. - (_, Ok(None)) => true, - (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true, - // Otherwise, we need to create a new borrow. - _ => false, - }; - if bor_redundant { - assert!(new_bor.is_aliasing(), "a unique reborrow can never be redundant"); - trace!("reborrow is redundant"); - continue; + extra: &MemoryState, + kind: MemoryKind, + ) -> (Self, Tag) { + let tag = match kind { + MemoryKind::Stack => { + // New unique borrow. This `Uniq` is not accessible by the program, + // so it will only ever be used when using the local directly (i.e., + // not through a pointer). That is, whenever we directly use a local, this will pop + // everything else off the stack, invalidating all previous pointers, + // and in particular, *all* raw pointers. This subsumes the explicit + // `reset` which the blog post [1] says to perform when accessing a local. + // + // [1]: + Tag::Tagged(extra.borrow_mut().new_ptr()) } - // We need to do some actual work. - let access_kind = if new_kind == RefKind::Unique { - AccessKind::Write - } else { - AccessKind::Read - }; - stack.access(ptr.tag, access_kind, &*barrier_tracking)?; - if let Some(call) = barrier { - stack.barrier(call); + _ => { + Tag::Untagged } - stack.create(new_bor, new_kind); - } - Ok(()) - } -} - -/// Hooks and glue. 
-impl AllocationExtra for Stacks { - #[inline(always)] - fn memory_allocated<'tcx>(size: Size, extra: &MemoryState) -> Self { - let stack = Stack { - borrows: vec![BorStackItem::Raw], - frozen_since: None, }; - Stacks { - stacks: RefCell::new(RangeMap::new(size, stack)), - barrier_tracking: Rc::clone(extra), - } + let stack = Stacks::new(size, tag, Rc::clone(extra)); + (stack, tag) } +} +impl AllocationExtra for Stacks { #[inline(always)] fn memory_read<'tcx>( - alloc: &Allocation, - ptr: Pointer, + alloc: &Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Read) + trace!("read access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); + alloc.extra.for_each(ptr, size, |stack, global| { + stack.access(AccessKind::Read, ptr.tag, global)?; + Ok(()) + }) } #[inline(always)] fn memory_written<'tcx>( - alloc: &mut Allocation, - ptr: Pointer, + alloc: &mut Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Write) + trace!("write access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); + alloc.extra.for_each(ptr, size, |stack, global| { + stack.access(AccessKind::Write, ptr.tag, global)?; + Ok(()) + }) } #[inline(always)] fn memory_deallocated<'tcx>( - alloc: &mut Allocation, - ptr: Pointer, + alloc: &mut Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Dealloc) - } -} - -impl<'tcx> Stacks { - /// Pushes the first item to the stacks. - pub(crate) fn first_item( - &mut self, - itm: BorStackItem, - size: Size - ) { - for stack in self.stacks.get_mut().iter_mut(Size::ZERO, size) { - assert!(stack.borrows.len() == 1); - assert_eq!(stack.borrows.pop().unwrap(), BorStackItem::Raw); - stack.borrows.push(itm); - } + trace!("deallocation with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); + alloc.extra.for_each(ptr, size, |stack, global| { + stack.dealloc(ptr.tag, global) + }) } } +/// Retagging/reborrowing. There is some policy in here, such as which permissions +/// to grant for which references, when to add protectors, and how to realize two-phase +/// borrows in terms of the primitives above. impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {} trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { fn reborrow( &mut self, - place: MPlaceTy<'tcx, Borrow>, + place: MPlaceTy<'tcx, Tag>, size: Size, - fn_barrier: bool, - new_bor: Borrow + kind: RefKind, + new_tag: Tag, + force_weak: bool, + protect: bool, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); + let protector = if protect { Some(this.frame().extra) } else { None }; let ptr = place.ptr.to_ptr()?; - let barrier = if fn_barrier { Some(this.frame().extra) } else { None }; - trace!("reborrow: creating new reference for {:?} (pointee {}): {:?}", - ptr, place.layout.ty, new_bor); + trace!("reborrow: {:?} reference {} derived from {} (pointee {}): {:?}, size {}", + kind, new_tag, ptr.tag, place.layout.ty, ptr, size.bytes()); // Get the allocation. It might not be mutable, so we cannot use `get_mut`. let alloc = this.memory().get(ptr.alloc_id)?; alloc.check_bounds(this, ptr, size)?; // Update the stacks. - if let Borrow::Alias(Some(_)) = new_bor { - // Reference that cares about freezing. We need a frozen-sensitive reborrow. 
- this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind) - })?; - } else { - // Just treat this as one big chunk. - let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?; - } - Ok(()) + // Make sure that raw pointers and mutable shared references are reborrowed "weak": + // There could be existing unique pointers reborrowed from them that should remain valid! + let perm = match kind { + RefKind::Unique => Permission::Unique, + RefKind::Raw { mutable: true } => Permission::SharedReadWrite, + RefKind::Shared | RefKind::Raw { mutable: false } => { + // Shared references and *const are a whole different kind of game, the + // permission is not uniform across the entire range! + // We need a frozen-sensitive reborrow. + return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { + // We are only ever `SharedReadOnly` inside the frozen bits. + let weak = !frozen || kind != RefKind::Shared; // `RefKind::Raw` is always weak, as is `SharedReadWrite`. + let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite }; + let item = Item { perm, tag: new_tag, protector }; + alloc.extra.for_each(cur_ptr, size, |stack, global| { + stack.reborrow(cur_ptr.tag, force_weak || weak, item, global) + }) + }); + } + }; + debug_assert_ne!(perm, Permission::SharedReadOnly, "SharedReadOnly must be used frozen-sensitive"); + let weak = perm == Permission::SharedReadWrite; + let item = Item { perm, tag: new_tag, protector }; + alloc.extra.for_each(ptr, size, |stack, global| { + stack.reborrow(ptr.tag, force_weak || weak, item, global) + }) } /// Retags an indidual pointer, returning the retagged version. /// `mutbl` can be `None` to make this a raw pointer. fn retag_reference( &mut self, - val: ImmTy<'tcx, Borrow>, - mutbl: Option, - fn_barrier: bool, + val: ImmTy<'tcx, Tag>, + kind: RefKind, + protect: bool, two_phase: bool, - ) -> EvalResult<'tcx, Immediate> { + ) -> EvalResult<'tcx, Immediate> { let this = self.eval_context_mut(); // We want a place for where the ptr *points to*, so we get one. let place = this.ref_to_mplace(val)?; @@ -581,23 +619,22 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, } // Compute new borrow. - let time = this.machine.stacked_borrows.increment_clock(); - let new_bor = match mutbl { - Some(MutMutable) => Borrow::Uniq(time), - Some(MutImmutable) => Borrow::Alias(Some(time)), - None => Borrow::default(), + let new_tag = match kind { + RefKind::Raw { .. } => Tag::Untagged, + _ => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()), }; // Reborrow. - this.reborrow(place, size, fn_barrier, new_bor)?; - let new_place = place.with_tag(new_bor); + this.reborrow(place, size, kind, new_tag, /*force_weak:*/ two_phase, protect)?; + let new_place = place.replace_tag(new_tag); // Handle two-phase borrows. 
if two_phase { - assert!(mutbl == Some(MutMutable), "two-phase shared borrows make no sense"); - // We immediately share it, to allow read accesses - let two_phase_time = this.machine.stacked_borrows.increment_clock(); - let two_phase_bor = Borrow::Alias(Some(two_phase_time)); - this.reborrow(new_place, size, false /* fn_barrier */, two_phase_bor)?; + assert!(kind == RefKind::Unique, "two-phase shared borrows make no sense"); + // Grant read access *to the parent pointer* with the old tag *derived from the new tag* (`new_place`). + // This means the old pointer has multiple items in the stack now, which otherwise cannot happen + // for unique references -- but in this case it precisely expresses the semantics we want. + let old_tag = place.ptr.to_ptr().unwrap().tag; + this.reborrow(new_place, size, RefKind::Shared, old_tag, /*force_weak:*/ false, /*protect:*/ false)?; } // Return new pointer. @@ -607,104 +644,28 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, impl<'a, 'mir, 'tcx> EvalContextExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {} pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { - fn tag_new_allocation( - &mut self, - id: AllocId, - kind: MemoryKind, - ) -> Borrow { - let this = self.eval_context_mut(); - let time = match kind { - MemoryKind::Stack => { - // New unique borrow. This `Uniq` is not accessible by the program, - // so it will only ever be used when using the local directly (i.e., - // not through a pointer). That is, whenever we directly use a local, this will pop - // everything else off the stack, invalidating all previous pointers, - // and in particular, *all* raw pointers. This subsumes the explicit - // `reset` which the blog post [1] says to perform when accessing a local. - // - // [1]: - this.machine.stacked_borrows.increment_clock() - } - _ => { - // Nothing to do for everything else. - return Borrow::default() - } - }; - // Make this the active borrow for this allocation. - let alloc = this - .memory_mut() - .get_mut(id) - .expect("this is a new allocation; it must still exist"); - let size = Size::from_bytes(alloc.bytes.len() as u64); - alloc.extra.first_item(BorStackItem::Uniq(time), size); - Borrow::Uniq(time) - } - - /// Called for value-to-place conversion. `mutability` is `None` for raw pointers. - /// - /// Note that this does *not* mean that all this memory will actually get accessed/referenced! - /// We could be in the middle of `&(*var).1`. - fn ptr_dereference( - &self, - place: MPlaceTy<'tcx, Borrow>, - size: Size, - mutability: Option, - ) -> EvalResult<'tcx> { - let this = self.eval_context_ref(); - trace!( - "ptr_dereference: Accessing {} reference for {:?} (pointee {})", - if let Some(mutability) = mutability { - format!("{:?}", mutability) - } else { - format!("raw") - }, - place.ptr, place.layout.ty - ); - let ptr = place.ptr.to_ptr()?; - if mutability.is_none() { - // No further checks on raw derefs -- only the access itself will be checked. - return Ok(()); - } - - // Get the allocation - let alloc = this.memory().get(ptr.alloc_id)?; - alloc.check_bounds(this, ptr, size)?; - // If we got here, we do some checking, *but* we leave the tag unchanged. - if let Borrow::Alias(Some(_)) = ptr.tag { - assert_eq!(mutability, Some(MutImmutable)); - // We need a frozen-sensitive check. 
- this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.deref(cur_ptr, size, kind) - })?; - } else { - // Just treat this as one big chunk. - let kind = if mutability == Some(MutMutable) { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.deref(ptr, size, kind)?; - } - - // All is good. - Ok(()) - } - fn retag( &mut self, kind: RetagKind, - place: PlaceTy<'tcx, Borrow> + place: PlaceTy<'tcx, Tag> ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); - // Determine mutability and whether to add a protector. // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless. - fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(Option<Mutability>, bool)> { + fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> { match ty.sty { // References are simple. - ty::Ref(_, _, mutbl) => Some((Some(mutbl), kind == RetagKind::FnEntry)), + ty::Ref(_, _, MutMutable) => + Some((RefKind::Unique, kind == RetagKind::FnEntry)), + ty::Ref(_, _, MutImmutable) => + Some((RefKind::Shared, kind == RetagKind::FnEntry)), // Raw pointers need to be enabled. - ty::RawPtr(..) if kind == RetagKind::Raw => Some((None, false)), - // Boxes do not get a barrier: barriers reflect that references outlive the call + ty::RawPtr(tym) if kind == RetagKind::Raw => + Some((RefKind::Raw { mutable: tym.mutbl == MutMutable }, false)), + // Boxes do not get a protector: protectors reflect that references outlive the call // they were passed in to; that's just not the case for boxes. - ty::Adt(..) if ty.is_box() => Some((Some(MutMutable), false)), + ty::Adt(..) if ty.is_box() => Some((RefKind::Unique, false)), _ => None, } } @@ -712,10 +673,10 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, // We need a visitor to visit all references. However, that requires // a `MemPlace`, so we have a fast path for reference types that // avoids allocating. - if let Some((mutbl, barrier)) = qualify(place.layout.ty, kind) { + if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) { // Fast path. let val = this.read_immediate(this.place_to_op(place)?)?; - let val = this.retag_reference(val, mutbl, barrier, kind == RetagKind::TwoPhase)?; + let val = this.retag_reference(val, mutbl, protector, kind == RetagKind::TwoPhase)?; this.write_immediate(val, place)?; return Ok(()); } @@ -734,7 +695,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, for RetagVisitor<'ecx, 'a, 'mir, 'tcx> { - type V = MPlaceTy<'tcx, Borrow>; + type V = MPlaceTy<'tcx, Tag>; #[inline(always)] fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> { @@ -742,16 +703,16 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, } // Primitives of reference type, that is the one thing we are interested in. - fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless.
- if let Some((mutbl, barrier)) = qualify(place.layout.ty, self.kind) { + if let Some((mutbl, protector)) = qualify(place.layout.ty, self.kind) { let val = self.ecx.read_immediate(place.into())?; let val = self.ecx.retag_reference( val, mutbl, - barrier, + protector, self.kind == RetagKind::TwoPhase )?; self.ecx.write_immediate(val, place.into())?; diff --git a/src/tls.rs b/src/tls.rs index 992e4fd056..9346fba0dc 100644 --- a/src/tls.rs +++ b/src/tls.rs @@ -5,14 +5,14 @@ use rustc::{ty, ty::layout::HasDataLayout, mir}; use crate::{ EvalResult, InterpError, StackPopCleanup, - MPlaceTy, Scalar, Borrow, + MPlaceTy, Scalar, Tag, }; pub type TlsKey = u128; #[derive(Copy, Clone, Debug)] pub struct TlsEntry<'tcx> { - pub(crate) data: Scalar<Borrow>, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread. + pub(crate) data: Scalar<Tag>, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread. pub(crate) dtor: Option<ty::Instance<'tcx>>, } @@ -63,7 +63,7 @@ impl<'tcx> TlsData<'tcx> { } } - pub fn load_tls(&mut self, key: TlsKey) -> EvalResult<'tcx, Scalar<Borrow>> { + pub fn load_tls(&mut self, key: TlsKey) -> EvalResult<'tcx, Scalar<Tag>> { match self.keys.get(&key) { Some(&TlsEntry { data, .. }) => { trace!("TLS key {} loaded: {:?}", key, data); @@ -73,7 +73,7 @@ impl<'tcx> TlsData<'tcx> { } } - pub fn store_tls(&mut self, key: TlsKey, new_data: Scalar<Borrow>) -> EvalResult<'tcx> { + pub fn store_tls(&mut self, key: TlsKey, new_data: Scalar<Tag>) -> EvalResult<'tcx> { match self.keys.get_mut(&key) { Some(&mut TlsEntry { ref mut data, .. }) => { trace!("TLS key {} stored: {:?}", key, new_data); @@ -106,7 +106,7 @@ impl<'tcx> TlsData<'tcx> { &mut self, key: Option<TlsKey>, cx: &impl HasDataLayout, - ) -> Option<(ty::Instance<'tcx>, Scalar<Borrow>, TlsKey)> { + ) -> Option<(ty::Instance<'tcx>, Scalar<Tag>, TlsKey)> { use std::collections::Bound::*; let thread_local = &mut self.keys; diff --git a/tests/compile-fail/stacked_borrows/alias_through_mutation.rs b/tests/compile-fail/stacked_borrows/alias_through_mutation.rs index 30f5921202..4a153d74ff 100644 --- a/tests/compile-fail/stacked_borrows/alias_through_mutation.rs +++ b/tests/compile-fail/stacked_borrows/alias_through_mutation.rs @@ -9,5 +9,5 @@ fn main() { retarget(&mut target_alias, target); // now `target_alias` points to the same thing as `target` *target = 13; - let _val = *target_alias; //~ ERROR does not exist on the borrow stack + let _val = *target_alias; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut1.rs b/tests/compile-fail/stacked_borrows/aliasing_mut1.rs index 9bced43f6e..d047925163 100644 --- a/tests/compile-fail/stacked_borrows/aliasing_mut1.rs +++ b/tests/compile-fail/stacked_borrows/aliasing_mut1.rs @@ -1,6 +1,6 @@ use std::mem; -pub fn safe(_x: &mut i32, _y: &mut i32) {} //~ ERROR barrier +pub fn safe(_x: &mut i32, _y: &mut i32) {} //~ ERROR protect fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut2.rs b/tests/compile-fail/stacked_borrows/aliasing_mut2.rs index ea24f1bd27..c679e01677 100644 --- a/tests/compile-fail/stacked_borrows/aliasing_mut2.rs +++ b/tests/compile-fail/stacked_borrows/aliasing_mut2.rs @@ -1,6 +1,6 @@ use std::mem; -pub fn safe(_x: &i32, _y: &mut i32) {} //~ ERROR barrier +pub fn safe(_x: &i32, _y: &mut i32) {} //~ ERROR protect fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut3.rs b/tests/compile-fail/stacked_borrows/aliasing_mut3.rs index e3c59d1566..3943e95761 ---
a/tests/compile-fail/stacked_borrows/aliasing_mut3.rs +++ b/tests/compile-fail/stacked_borrows/aliasing_mut3.rs @@ -1,6 +1,6 @@ use std::mem; -pub fn safe(_x: &mut i32, _y: &i32) {} //~ ERROR does not exist on the borrow stack +pub fn safe(_x: &mut i32, _y: &i32) {} //~ ERROR borrow stack fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut4.rs b/tests/compile-fail/stacked_borrows/aliasing_mut4.rs index 15f67d0f87..778935a6d0 100644 --- a/tests/compile-fail/stacked_borrows/aliasing_mut4.rs +++ b/tests/compile-fail/stacked_borrows/aliasing_mut4.rs @@ -2,7 +2,7 @@ use std::mem; use std::cell::Cell; // Make sure &mut UnsafeCell also is exclusive -pub fn safe(_x: &i32, _y: &mut Cell<i32>) {} //~ ERROR barrier +pub fn safe(_x: &i32, _y: &mut Cell<i32>) {} //~ ERROR protect fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs b/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs index 481915faed..7d7f5e24e2 100644 --- a/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs +++ b/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs @@ -8,7 +8,7 @@ fn demo_mut_advanced_unique(mut our: Box<i32>) -> i32 { unknown_code_2(); // We know this will return 5 - *our //~ ERROR does not exist on the borrow stack + *our //~ ERROR borrow stack } // Now comes the evil context diff --git a/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs b/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs index 98d4e6f229..9ff67ae354 100644 --- a/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs +++ b/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs @@ -13,5 +13,5 @@ fn main() { let v1 = safe::as_mut_slice(&v); let _v2 = safe::as_mut_slice(&v); v1[1] = 5; - //~^ ERROR does not exist on the borrow stack + //~^ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs b/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs index 42f345f551..812dd47ef1 100644 --- a/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs +++ b/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs @@ -9,7 +9,7 @@ mod safe { assert!(mid <= len); (from_raw_parts_mut(ptr, len - mid), // BUG: should be "mid" instead of "len - mid" - //~^ ERROR does not exist on the borrow stack + //~^ ERROR borrow stack from_raw_parts_mut(ptr.offset(mid as isize), len - mid)) } } diff --git a/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs b/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs index b2f1c824f1..49e376c028 100644 --- a/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs +++ b/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs @@ -1,4 +1,4 @@ -// error-pattern: deallocating with active barrier +// error-pattern: deallocating with active protect fn inner(x: &mut i32, f: fn(&mut i32)) { // `f` may mutate, but it may not deallocate! diff --git a/tests/compile-fail/stacked_borrows/illegal_read1.rs b/tests/compile-fail/stacked_borrows/illegal_read1.rs index 3fb38abefd..d942d2b27b 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read1.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read1.rs @@ -7,7 +7,7 @@ fn main() { let xref = unsafe { &mut *xraw }; // derived from raw, so using raw is still ok... callee(xraw); let _val = *xref; // ...but any use of raw will invalidate our ref.
- //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xraw: *mut i32) { diff --git a/tests/compile-fail/stacked_borrows/illegal_read2.rs b/tests/compile-fail/stacked_borrows/illegal_read2.rs index e43340f0b8..c50c88d48f 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read2.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read2.rs @@ -7,7 +7,7 @@ fn main() { let xref = unsafe { &mut *xraw }; // derived from raw, so using raw is still ok... callee(xraw); let _val = *xref; // ...but any use of raw will invalidate our ref. - //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xraw: *mut i32) { diff --git a/tests/compile-fail/stacked_borrows/illegal_read3.rs b/tests/compile-fail/stacked_borrows/illegal_read3.rs index b4abbb4a1a..09fd5d534c 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read3.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read3.rs @@ -15,7 +15,7 @@ fn main() { callee(xref1_sneaky); // ... though any use of it will invalidate our ref. let _val = *xref2; - //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xref1: usize) { diff --git a/tests/compile-fail/stacked_borrows/illegal_read4.rs b/tests/compile-fail/stacked_borrows/illegal_read4.rs index bb889de8f8..d7e281e3ff 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read4.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read4.rs @@ -5,5 +5,5 @@ fn main() { let xraw = xref1 as *mut _; let xref2 = unsafe { &mut *xraw }; let _val = unsafe { *xraw }; // use the raw again, this invalidates xref2 *even* with the special read exception for uniq refs - let _illegal = *xref2; //~ ERROR does not exist on the borrow stack + let _illegal = *xref2; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_read5.rs b/tests/compile-fail/stacked_borrows/illegal_read5.rs index 0f4737f16e..d6120cd64a 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read5.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read5.rs @@ -12,5 +12,5 @@ fn main() { let _val = *xref; // we can even still use our mutable reference mem::forget(unsafe { ptr::read(xshr) }); // but after reading through the shared ref let _val = *xref; // the mutable one is dead and gone - //~^ ERROR does not exist on the borrow stack + //~^ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_read6.rs b/tests/compile-fail/stacked_borrows/illegal_read6.rs new file mode 100644 index 0000000000..dc37814729 --- /dev/null +++ b/tests/compile-fail/stacked_borrows/illegal_read6.rs @@ -0,0 +1,8 @@ +// Creating a shared reference does not leak the data to raw pointers.
+fn main() { unsafe { + let x = &mut 0; + let raw = x as *mut _; + let x = &mut *x; // kill `raw` + let _y = &*x; // this should not activate `raw` again + let _val = *raw; //~ ERROR borrow stack +} } diff --git a/tests/compile-fail/stacked_borrows/illegal_write1.rs b/tests/compile-fail/stacked_borrows/illegal_write1.rs index d0a23cb444..dd262a341e 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write1.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write1.rs @@ -5,5 +5,5 @@ fn main() { let x : *mut u32 = xref as *const _ as *mut _; unsafe { *x = 42; } // invalidates shared ref, activates raw } - let _x = *xref; //~ ERROR is not frozen + let _x = *xref; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_write2.rs b/tests/compile-fail/stacked_borrows/illegal_write2.rs index affa21c762..62ea05e181 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write2.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write2.rs @@ -3,6 +3,6 @@ fn main() { let target2 = target as *mut _; drop(&mut *target); // reborrow // Now make sure our ref is still the only one. - unsafe { *target2 = 13; } //~ ERROR does not exist on the borrow stack + unsafe { *target2 = 13; } //~ ERROR borrow stack let _val = *target; } diff --git a/tests/compile-fail/stacked_borrows/illegal_write3.rs b/tests/compile-fail/stacked_borrows/illegal_write3.rs index dc4edcc3a5..d2d8528d90 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write3.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write3.rs @@ -3,6 +3,6 @@ fn main() { // Make sure raw ptr with raw tag cannot mutate frozen location without breaking the shared ref. let r#ref = &target; // freeze let ptr = r#ref as *const _ as *mut _; // raw ptr, with raw tag - unsafe { *ptr = 42; } //~ ERROR does not exist on the borrow stack + unsafe { *ptr = 42; } //~ ERROR borrow stack let _val = *r#ref; } diff --git a/tests/compile-fail/stacked_borrows/illegal_write4.rs b/tests/compile-fail/stacked_borrows/illegal_write4.rs index 37ae0f055f..be4f89ba28 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write4.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write4.rs @@ -9,5 +9,5 @@ fn main() { let ptr = reference as *const _ as *mut i32; // raw ptr, with raw tag let _mut_ref: &mut i32 = unsafe { mem::transmute(ptr) }; // &mut, with raw tag // Now we retag, making our ref top-of-stack -- and, in particular, unfreezing. - let _val = *reference; //~ ERROR is not frozen + let _val = *reference; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_write5.rs b/tests/compile-fail/stacked_borrows/illegal_write5.rs index 3a0738bfd0..c60fe90fe0 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write5.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write5.rs @@ -8,7 +8,7 @@ fn main() { callee(xraw); // ... though any use of raw value will invalidate our ref. let _val = *xref; - //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xraw: *mut i32) { diff --git a/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs b/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs index fc0dbb9e13..3a214a75b5 100644 --- a/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs +++ b/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs @@ -2,7 +2,7 @@ fn inner(x: *mut i32, _y: &mut i32) { // If `x` and `y` alias, retagging is fine with this...
but we really // shouldn't be allowed to use `x` at all because `y` was assumed to be // unique for the duration of this call. - let _val = unsafe { *x }; //~ ERROR barrier + let _val = unsafe { *x }; //~ ERROR protect } fn main() { diff --git a/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs b/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs index a080c0958e..86e4a84287 100644 --- a/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs +++ b/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs @@ -2,7 +2,7 @@ fn inner(x: *mut i32, _y: &i32) { // If `x` and `y` alias, retagging is fine with this... but we really // shouldn't be allowed to write to `x` at all because `y` was assumed to be // immutable for the duration of this call. - unsafe { *x = 0 }; //~ ERROR barrier + unsafe { *x = 0 }; //~ ERROR protect } fn main() { diff --git a/tests/compile-fail/stacked_borrows/load_invalid_mut.rs b/tests/compile-fail/stacked_borrows/load_invalid_mut.rs index f2e4b36f85..1704b7fe19 100644 --- a/tests/compile-fail/stacked_borrows/load_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/load_invalid_mut.rs @@ -5,5 +5,5 @@ fn main() { let xref = unsafe { &mut *xraw }; let xref_in_mem = Box::new(xref); let _val = unsafe { *xraw }; // invalidate xref - let _val = *xref_in_mem; //~ ERROR does not exist on the borrow stack + let _val = *xref_in_mem; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/load_invalid_shr.rs b/tests/compile-fail/stacked_borrows/load_invalid_shr.rs index 6599924f0f..4757a2c1e5 100644 --- a/tests/compile-fail/stacked_borrows/load_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/load_invalid_shr.rs @@ -5,5 +5,5 @@ fn main() { let xref = unsafe { &*xraw }; let xref_in_mem = Box::new(xref); unsafe { *xraw = 42 }; // unfreeze - let _val = *xref_in_mem; //~ ERROR is not frozen + let _val = *xref_in_mem; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs b/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs index 3fe6b65674..03343b985a 100644 --- a/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs +++ b/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs @@ -21,7 +21,7 @@ fn unknown_code_1(x: &i32) { unsafe { } } fn unknown_code_2() { unsafe { - *LEAK = 7; //~ ERROR barrier + *LEAK = 7; //~ ERROR borrow stack } } fn main() { diff --git a/tests/compile-fail/stacked_borrows/outdated_local.rs b/tests/compile-fail/stacked_borrows/outdated_local.rs index ba36e43e0c..4cb655366e 100644 --- a/tests/compile-fail/stacked_borrows/outdated_local.rs +++ b/tests/compile-fail/stacked_borrows/outdated_local.rs @@ -3,7 +3,7 @@ fn main() { let y: *const i32 = &x; x = 1; // this invalidates y by reactivating the lowermost uniq borrow for this local - assert_eq!(unsafe { *y }, 1); //~ ERROR does not exist on the borrow stack + assert_eq!(unsafe { *y }, 1); //~ ERROR borrow stack assert_eq!(x, 1); } diff --git a/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs b/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs index b239237f01..d8a53b7a96 100644 --- a/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs @@ -6,5 +6,5 @@ fn main() { let xraw = x as *mut _; let xref = unsafe { &mut *xraw }; let _val = unsafe { *xraw }; // invalidate xref - foo(xref); //~ ERROR does not exist on the borrow stack + foo(xref); //~ ERROR borrow stack } diff --git 
a/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs b/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs index 22a80e2710..091604a283 100644 --- a/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs @@ -6,5 +6,5 @@ fn main() { let xraw = x as *mut _; let xref = unsafe { &*xraw }; unsafe { *xraw = 42 }; // unfreeze - foo(xref); //~ ERROR is not frozen + foo(xref); //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/pointer_smuggling.rs b/tests/compile-fail/stacked_borrows/pointer_smuggling.rs index a8207d58e9..f724cdd2a7 100644 --- a/tests/compile-fail/stacked_borrows/pointer_smuggling.rs +++ b/tests/compile-fail/stacked_borrows/pointer_smuggling.rs @@ -8,7 +8,7 @@ fn fun1(x: &mut u8) { fn fun2() { // Now we use a pointer we are not allowed to use - let _x = unsafe { *PTR }; //~ ERROR does not exist on the borrow stack + let _x = unsafe { *PTR }; //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut.rs b/tests/compile-fail/stacked_borrows/return_invalid_mut.rs index 31f8a4e33a..54004ec438 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_mut.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> &mut i32 { let xraw = x as *mut (i32, i32); let ret = unsafe { &mut (*xraw).1 }; let _val = unsafe { *xraw }; // invalidate xref - ret //~ ERROR does not exist on the borrow stack + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs b/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs index 750d507d6f..2eb2df81f5 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> Option<&mut i32> { let xraw = x as *mut (i32, i32); let ret = Some(unsafe { &mut (*xraw).1 }); let _val = unsafe { *xraw }; // invalidate xref - ret //~ ERROR does not exist on the borrow stack + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs b/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs index bb712e9e48..8b73df4bd1 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> (&mut i32,) { let xraw = x as *mut (i32, i32); let ret = (unsafe { &mut (*xraw).1 },); let _val = unsafe { *xraw }; // invalidate xref - ret //~ ERROR does not exist on the borrow stack + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_shr.rs b/tests/compile-fail/stacked_borrows/return_invalid_shr.rs index 986dd18b2e..eab026f9a4 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_shr.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> &i32 { let xraw = x as *mut (i32, i32); let ret = unsafe { &(*xraw).1 }; unsafe { *xraw = (42, 23) }; // unfreeze - ret //~ ERROR is not frozen + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs b/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs index 9d220991c3..f3a35ca266 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs +++ 
b/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> Option<&i32> { let xraw = x as *mut (i32, i32); let ret = Some(unsafe { &(*xraw).1 }); unsafe { *xraw = (42, 23) }; // unfreeze - ret //~ ERROR is not frozen + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs b/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs index 060fa25c23..82723bade2 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> (&i32,) { let xraw = x as *mut (i32, i32); let ret = (unsafe { &(*xraw).1 },); unsafe { *xraw = (42, 23) }; // unfreeze - ret //~ ERROR is not frozen + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak1.rs b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak1.rs new file mode 100644 index 0000000000..d734caf1d9 --- /dev/null +++ b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak1.rs @@ -0,0 +1,14 @@ +// We want to test that granting a SharedReadWrite will be added +// *below* an already granted Unique -- so writing to +// the SharedReadWrite will invalidate the Unique. + +use std::mem; +use std::cell::Cell; + +fn main() { unsafe { + let x = &mut Cell::new(0); + let y: &mut Cell<i32> = mem::transmute(&mut *x); // launder lifetime + let shr_rw = &*x; // thanks to interior mutability this will be a SharedReadWrite + shr_rw.set(1); + y.get_mut(); //~ ERROR borrow stack +} } diff --git a/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak2.rs b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak2.rs new file mode 100644 index 0000000000..942bb503db --- /dev/null +++ b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak2.rs @@ -0,0 +1,14 @@ +// We want to test that granting a SharedReadWrite will be added +// *below* an already granted SharedReadWrite -- so writing to +// the SharedReadWrite will invalidate the SharedReadWrite. + +use std::mem; +use std::cell::RefCell; + +fn main() { unsafe { + let x = &mut RefCell::new(0); + let y: &i32 = mem::transmute(&*x.borrow()); // launder lifetime + let shr_rw = &*x; // thanks to interior mutability this will be a SharedReadWrite + shr_rw.replace(1); + let _val = *y; //~ ERROR borrow stack +} } diff --git a/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs b/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs index 560c9dfb66..5031210c54 100644 --- a/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs +++ b/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs @@ -8,9 +8,6 @@ fn main() { println!("{}", foo(&mut 0)); } -// If we replace the `*const` by `&`, my current dev version of miri -// *does* find the problem, but not for a good reason: It finds it because -// of barriers, and we shouldn't rely on unknown code using barriers.
-fn unknown_code(x: *const i32) { - unsafe { *(x as *mut i32) = 7; } //~ ERROR barrier +fn unknown_code(x: &i32) { + unsafe { *(x as *const i32 as *mut i32) = 7; } //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/static_memory_modification.rs b/tests/compile-fail/stacked_borrows/static_memory_modification.rs index c092cbfe50..88ac164947 100644 --- a/tests/compile-fail/stacked_borrows/static_memory_modification.rs +++ b/tests/compile-fail/stacked_borrows/static_memory_modification.rs @@ -3,6 +3,6 @@ static X: usize = 5; #[allow(mutable_transmutes)] fn main() { let _x = unsafe { - std::mem::transmute::<&usize, &mut usize>(&X) //~ ERROR mutable reference with frozen tag + std::mem::transmute::<&usize, &mut usize>(&X) //~ ERROR borrow stack }; } diff --git a/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs b/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs index 45ada88977..e9282c5ba8 100644 --- a/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs +++ b/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs @@ -10,5 +10,5 @@ fn main() { let _raw: *mut i32 = unsafe { mem::transmute(&mut x[0]) }; // `raw` still carries a tag, so we get another pointer to the same location that does not carry a tag let raw = (&mut x[1] as *mut i32).wrapping_offset(-1); - unsafe { *raw = 13; } //~ ERROR does not exist on the borrow stack + unsafe { *raw = 13; } //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/unescaped_local.rs b/tests/compile-fail/stacked_borrows/unescaped_local.rs index 1db14ea7ed..b49e6cce63 100644 --- a/tests/compile-fail/stacked_borrows/unescaped_local.rs +++ b/tests/compile-fail/stacked_borrows/unescaped_local.rs @@ -4,5 +4,5 @@ fn main() { let mut x = 42; let raw = &mut x as *mut i32 as usize as *mut i32; let _ptr = &mut x; - unsafe { *raw = 13; } //~ ERROR does not exist on the borrow stack + unsafe { *raw = 13; } //~ ERROR borrow stack } diff --git a/tests/run-pass/ptr_arith_offset.rs b/tests/run-pass/ptr_arith_offset.rs index 7912da9fd4..a6ee151e3e 100644 --- a/tests/run-pass/ptr_arith_offset.rs +++ b/tests/run-pass/ptr_arith_offset.rs @@ -1,6 +1,6 @@ fn main() { let v = [1i16, 2]; - let x = &v as *const i16; + let x = &v as *const [i16] as *const i16; let x = x.wrapping_offset(1); assert_eq!(unsafe { *x }, 2); } diff --git a/tests/run-pass/ptr_offset.rs b/tests/run-pass/ptr_offset.rs index 9e2e26fad3..1c7f0eb717 100644 --- a/tests/run-pass/ptr_offset.rs +++ b/tests/run-pass/ptr_offset.rs @@ -2,7 +2,7 @@ fn f() -> i32 { 42 } fn main() { let v = [1i16, 2]; - let x = &v as *const i16; + let x = &v as *const [i16; 2] as *const i16; let x = unsafe { x.offset(1) }; assert_eq!(unsafe { *x }, 2); diff --git a/tests/run-pass/refcell.rs b/tests/run-pass/refcell.rs index 0bc8b15c5f..93cef1572a 100644 --- a/tests/run-pass/refcell.rs +++ b/tests/run-pass/refcell.rs @@ -1,6 +1,6 @@ use std::cell::RefCell; -fn lots_of_funny_borrows() { +fn main() { let c = RefCell::new(42); { let s1 = c.borrow(); @@ -31,47 +31,3 @@ fn lots_of_funny_borrows() { let _y: i32 = *s2; } } - -fn aliasing_mut_and_shr() { - fn inner(rc: &RefCell<i32>, aliasing: &mut i32) { - *aliasing += 4; - let _escape_to_raw = rc as *const _; - *aliasing += 4; - let _shr = &*rc; - *aliasing += 4; - // also turning this into a frozen ref now must work - let aliasing = &*aliasing; - let _val = *aliasing; - let _escape_to_raw = rc as *const _; // this must NOT unfreeze - let _val = *aliasing; - let _shr = &*rc; // this must NOT unfreeze - let _val = *aliasing; -
} - - let rc = RefCell::new(23); - let mut bmut = rc.borrow_mut(); - inner(&rc, &mut *bmut); - drop(bmut); - assert_eq!(*rc.borrow(), 23+12); -} - -fn aliasing_frz_and_shr() { - fn inner(rc: &RefCell<i32>, aliasing: &i32) { - let _val = *aliasing; - let _escape_to_raw = rc as *const _; // this must NOT unfreeze - let _val = *aliasing; - let _shr = &*rc; // this must NOT unfreeze - let _val = *aliasing; - } - - let rc = RefCell::new(23); - let bshr = rc.borrow(); - inner(&rc, &*bshr); - assert_eq!(*rc.borrow(), 23); -} - -fn main() { - lots_of_funny_borrows(); - aliasing_mut_and_shr(); - aliasing_frz_and_shr(); -} diff --git a/tests/run-pass/regions-mock-trans.rs b/tests/run-pass/regions-mock-trans.rs index ac8a1c04fb..020ed4927a 100644 --- a/tests/run-pass/regions-mock-trans.rs +++ b/tests/run-pass/regions-mock-trans.rs @@ -22,14 +22,14 @@ struct Ccx { x: isize } -fn alloc<'a>(_bcx : &'a Arena) -> &'a Bcx<'a> { +fn alloc<'a>(_bcx : &'a Arena) -> &'a mut Bcx<'a> { unsafe { mem::transmute(libc::malloc(mem::size_of::<Bcx>() as libc::size_t)) } } -fn h<'a>(bcx : &'a Bcx<'a>) -> &'a Bcx<'a> { +fn h<'a>(bcx : &'a Bcx<'a>) -> &'a mut Bcx<'a> { return alloc(bcx.fcx.arena); } diff --git a/tests/run-pass/2phase.rs b/tests/run-pass/stacked-borrows/2phase.rs similarity index 65% rename from tests/run-pass/2phase.rs rename to tests/run-pass/stacked-borrows/2phase.rs index 57f3631143..97f435472e 100644 --- a/tests/run-pass/2phase.rs +++ b/tests/run-pass/stacked-borrows/2phase.rs @@ -1,3 +1,5 @@ +#![allow(mutable_borrow_reservation_conflict)] + trait S: Sized { fn tpb(&mut self, _s: Self) {} } @@ -26,7 +28,21 @@ fn two_phase3(b: bool) { )); } -/* +#[allow(unreachable_code)] +fn two_phase_raw() { + let x: &mut Vec<i32> = &mut vec![]; + x.push( + { + // Unfortunately this does not trigger the problem of creating a + // raw pointer from a pointer that had a two-phase borrow derived from + // it because of the implicit &mut reborrow.
+ let raw = x as *mut _; + unsafe { *raw = vec![1]; } + return + } + ); +} + fn two_phase_overlapping1() { let mut x = vec![]; let p = &x; @@ -39,7 +55,6 @@ fn two_phase_overlapping2() { let l = &x; x.add_assign(x + *l); } -*/ fn with_interior_mutability() { use std::cell::Cell; @@ -53,7 +68,6 @@ fn with_interior_mutability() { let mut x = Cell::new(1); let l = &x; - #[allow(unknown_lints, mutable_borrow_reservation_conflict)] x .do_the_thing({ x.set(3); @@ -68,8 +82,8 @@ fn main() { two_phase2(); two_phase3(false); two_phase3(true); + two_phase_raw(); with_interior_mutability(); - //FIXME: enable these, or remove them, depending on how https://github.com/rust-lang/rust/issues/56254 gets resolved - //two_phase_overlapping1(); - //two_phase_overlapping2(); + two_phase_overlapping1(); + two_phase_overlapping2(); } diff --git a/tests/run-pass/stacked-borrows/interior_mutability.rs b/tests/run-pass/stacked-borrows/interior_mutability.rs new file mode 100644 index 0000000000..33f44d0093 --- /dev/null +++ b/tests/run-pass/stacked-borrows/interior_mutability.rs @@ -0,0 +1,59 @@ +#![feature(maybe_uninit, maybe_uninit_ref)] +use std::mem::MaybeUninit; +use std::cell::Cell; +use std::cell::RefCell; + +fn main() { + aliasing_mut_and_shr(); + aliasing_frz_and_shr(); + into_interior_mutability(); +} + +fn aliasing_mut_and_shr() { + fn inner(rc: &RefCell<i32>, aliasing: &mut i32) { + *aliasing += 4; + let _escape_to_raw = rc as *const _; + *aliasing += 4; + let _shr = &*rc; + *aliasing += 4; + // also turning this into a frozen ref now must work + let aliasing = &*aliasing; + let _val = *aliasing; + let _escape_to_raw = rc as *const _; // this must NOT unfreeze + let _val = *aliasing; + let _shr = &*rc; // this must NOT unfreeze + let _val = *aliasing; + } + + let rc = RefCell::new(23); + let mut bmut = rc.borrow_mut(); + inner(&rc, &mut *bmut); + drop(bmut); + assert_eq!(*rc.borrow(), 23+12); +} + +fn aliasing_frz_and_shr() { + fn inner(rc: &RefCell<i32>, aliasing: &i32) { + let _val = *aliasing; + let _escape_to_raw = rc as *const _; // this must NOT unfreeze + let _val = *aliasing; + let _shr = &*rc; // this must NOT unfreeze + let _val = *aliasing; + } + + let rc = RefCell::new(23); + let bshr = rc.borrow(); + inner(&rc, &*bshr); + assert_eq!(*rc.borrow(), 23); +} + +// Getting a pointer into a union with interior mutability used to be tricky +// business (https://github.com/rust-lang/miri/issues/615), but it should work +// now. +fn into_interior_mutability() { + let mut x: MaybeUninit<(Cell<u32>, u32)> = MaybeUninit::uninit(); + x.as_ptr(); + x.write((Cell::new(0), 1)); + let ptr = unsafe { x.get_ref() }; + assert_eq!(ptr.1, 1); +} diff --git a/tests/run-pass/stacked-borrows.rs b/tests/run-pass/stacked-borrows/stacked-borrows.rs similarity index 82% rename from tests/run-pass/stacked-borrows.rs rename to tests/run-pass/stacked-borrows/stacked-borrows.rs index 711026c02d..7d84e33b3d 100644 --- a/tests/run-pass/stacked-borrows.rs +++ b/tests/run-pass/stacked-borrows/stacked-borrows.rs @@ -10,6 +10,8 @@ fn main() { partially_invalidate_mut(); drop_after_sharing(); direct_mut_to_const_raw(); + two_raw(); + shr_and_raw(); } // Deref a raw ptr to access a field of a large struct, where the field @@ -123,3 +125,27 @@ fn direct_mut_to_const_raw() { assert_eq!(*x, 1); */ } + +// Make sure that we can create two raw pointers from a mutable reference and use them both.
+fn two_raw() { unsafe { + let x = &mut 0; + // Given the implicit reborrows, the only reason this currently works is that we + // do not track raw pointers: The creation of `y2` reborrows `x` and thus pops + // `y1` off the stack. + let y1 = x as *mut _; + let y2 = x as *mut _; + *y1 += 2; + *y2 += 1; +} } + +// Make sure that creating a *mut does not invalidate existing shared references. +fn shr_and_raw() { /* unsafe { + use std::mem; + // FIXME: This is currently disabled because "as *mut _" incurs a reborrow. + let x = &mut 0; + let y1: &i32 = mem::transmute(&*x); // launder lifetimes + let y2 = x as *mut _; + let _val = *y1; + *y2 += 1; + // TODO: Once this works, add compile-fail test that tries to read from y1 again. +} */ }
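A few worked examples may help map the new permission machinery onto source programs. First, the frozen-sensitive reborrow in the `RefKind::Shared` arm of `reborrow` above: the permission a shared reference grants is not uniform across the pointee -- `SharedReadOnly` outside `UnsafeCell`, `SharedReadWrite` inside. A minimal sketch assuming exactly those semantics; it should run cleanly under Miri:

use std::cell::Cell;

fn main() {
    // Under `&pair`, `pair.0` is frozen while `pair.1` is interior-mutable.
    let pair = (1_i32, Cell::new(2_i32));
    let shr = &pair;
    shr.1.set(3);             // the `Cell` range was granted SharedReadWrite
    assert_eq!(shr.0, 1);     // the frozen range (SharedReadOnly) is still readable
    assert_eq!(shr.1.get(), 3);
}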
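Second, why `SharedReadWrite` must be granted "weak", i.e. added below existing items rather than on top: this is exactly what the new `shared_rw_borrows_are_weak1.rs` test exercises. The stack states in the comments below are an informal illustration of the intended semantics, not actual Miri output:

use std::cell::Cell;
use std::mem;

fn main() { unsafe {
    let x = &mut Cell::new(0);                        // stack: [.., Unique(x)]
    let y: &mut Cell<i32> = mem::transmute(&mut *x);  // [.., Unique(x), Unique(y)]
    // Interior mutability: `&*x` is granted SharedReadWrite, and the weak
    // grant lands *below* Unique(y):
    let shr_rw = &*x;  // [.., Unique(x), SharedReadWrite(shr_rw), Unique(y)]
    shr_rw.set(1);     // a write through shr_rw pops Unique(y) above it
    y.get_mut();       // ERROR: `y`'s item is gone from the borrow stack
} }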
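Third, the two-phase case in `retag_reference`: the old tag is re-granted shared read access derived from the new place because, between the implicit `&mut` reborrow (the reservation) and the start of the call, the argument expression still reads through the parent pointer. The classic surface pattern this keeps legal:

fn main() {
    let mut v = vec![0, 1];
    // Roughly: `let tmp = &mut v /* two-phase */; Vec::push(tmp, Vec::len(&v))`.
    // The read `v.len()` goes through the old tag while `tmp` is only reserved.
    v.push(v.len());
    assert_eq!(v, [0, 1, 2]);
}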
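Finally, protectors (the new name for call barriers): a reference passed to a function is protected for the duration of the call, so removing its item from the stack while the callee runs is an error, even for a read. This is the shape of `invalidate_against_barrier1.rs`; its `main` is truncated in the hunk above, so the `main` shown here is a reconstruction for illustration only:

fn inner(x: *mut i32, _y: &mut i32) {
    // `_y` was retagged at FnEntry with a protector. Reading through `x`
    // would have to pop that protected item -- an error while the call runs.
    let _val = unsafe { *x };
}

fn main() {
    let mut x = 0;
    let xraw = &mut x as *mut i32;
    let xref = unsafe { &mut *xraw }; // xref is derived from xraw...
    inner(xraw, xref);                // ...but using xraw *during* the call trips the protector
}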