Skip to content

Commit d136fdb

Browse files
authored
Merge pull request #513 from RalfJung/new-interior-mut
New Stacked Borrows, now with better support for interior mutability
2 parents d0b79cf + d694dc4 commit d136fdb

30 files changed

+472
-346
lines changed

cargo-miri-test/run-test.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,8 @@
1010
def test_cargo_miri():
1111
print("==> Testing `cargo miri` <==")
1212
## Call `cargo miri`, capture all output
13-
# FIXME: Disabling validation, still investigating whether there is UB here
1413
p = subprocess.Popen(
15-
["cargo", "miri", "-q", "--", "-Zmiri-disable-validation"],
14+
["cargo", "miri", "-q"],
1615
stdout=subprocess.PIPE,
1716
stderr=subprocess.PIPE
1817
)

rust-version

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
nightly-2018-11-07
1+
nightly-2018-11-08

src/helpers.rs

Lines changed: 159 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use std::mem;
22

3-
use rustc::ty;
3+
use rustc::ty::{self, layout};
44
use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};
55

66
use crate::*;
@@ -32,6 +32,15 @@ impl<Tag> ScalarExt for ScalarMaybeUndef<Tag> {
3232

3333
pub trait EvalContextExt<'tcx> {
3434
fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;
35+
36+
/// Visit the memory covered by `place` that is frozen -- i.e., NOT
37+
/// what is inside an `UnsafeCell`.
38+
fn visit_frozen(
39+
&self,
40+
place: MPlaceTy<'tcx, Borrow>,
41+
size: Size,
42+
action: impl FnMut(Pointer<Borrow>, Size) -> EvalResult<'tcx>,
43+
) -> EvalResult<'tcx>;
3544
}
3645

3746

@@ -69,4 +78,153 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
6978
EvalErrorKind::PathNotFound(path).into()
7079
})
7180
}
81+
82+
/// Visit the memory covered by `place` that is frozen -- i.e., NOT
83+
/// what is inside an `UnsafeCell`.
84+
fn visit_frozen(
85+
&self,
86+
place: MPlaceTy<'tcx, Borrow>,
87+
size: Size,
88+
mut frozen_action: impl FnMut(Pointer<Borrow>, Size) -> EvalResult<'tcx>,
89+
) -> EvalResult<'tcx> {
90+
trace!("visit_frozen(place={:?}, size={:?})", *place, size);
91+
debug_assert_eq!(size,
92+
self.size_and_align_of_mplace(place)?
93+
.map(|(size, _)| size)
94+
.unwrap_or_else(|| place.layout.size)
95+
);
96+
// Store how far we proceeded into the place so far. Everything to the left of
97+
// this offset has already been handled, in the sense that the frozen parts
98+
// have had `action` called on them.
99+
let mut end_ptr = place.ptr;
100+
// Called when we detected an `UnsafeCell` at the given offset and size.
101+
// Calls `action` and advances `end_ptr`.
102+
let mut unsafe_cell_action = |unsafe_cell_offset, unsafe_cell_size| {
103+
// We assume that we are given the fields in increasing offset order,
104+
// and nothing else changes.
105+
let end_offset = end_ptr.get_ptr_offset(self);
106+
assert!(unsafe_cell_offset >= end_offset);
107+
let frozen_size = unsafe_cell_offset - end_offset;
108+
// Everything between the end_ptr and this `UnsafeCell` is frozen.
109+
if frozen_size != Size::ZERO {
110+
frozen_action(end_ptr.to_ptr()?, frozen_size)?;
111+
}
112+
// Update end_ptr.
113+
end_ptr = end_ptr.ptr_wrapping_offset(frozen_size+unsafe_cell_size, self);
114+
// Done
115+
Ok(())
116+
};
117+
// Run a visitor
118+
{
119+
let mut visitor = UnsafeCellVisitor {
120+
ecx: self,
121+
unsafe_cell_action: |place| {
122+
trace!("unsafe_cell_action on {:?}", place.ptr);
123+
// We need a size to go on.
124+
let (unsafe_cell_size, _) = self.size_and_align_of_mplace(place)?
125+
// for extern types, just cover what we can
126+
.unwrap_or_else(|| place.layout.size_and_align());
127+
// Now handle this `UnsafeCell`, unless it is empty.
128+
if unsafe_cell_size != Size::ZERO {
129+
unsafe_cell_action(place.ptr.get_ptr_offset(self), unsafe_cell_size)
130+
} else {
131+
Ok(())
132+
}
133+
},
134+
};
135+
visitor.visit_value(place)?;
136+
}
137+
// The part between the end_ptr and the end of the place is also frozen.
138+
// So pretend there is a 0-sized `UnsafeCell` at the end.
139+
unsafe_cell_action(place.ptr.get_ptr_offset(self) + size, Size::ZERO)?;
140+
// Done!
141+
return Ok(());
142+
143+
/// Visiting the memory covered by a `MemPlace`, being aware of
144+
/// whether we are inside an `UnsafeCell` or not.
145+
struct UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F>
146+
where F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>
147+
{
148+
ecx: &'ecx MiriEvalContext<'a, 'mir, 'tcx>,
149+
unsafe_cell_action: F,
150+
}
151+
152+
impl<'ecx, 'a, 'mir, 'tcx, F> ValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>>
153+
for UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F>
154+
where
155+
F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>
156+
{
157+
type V = MPlaceTy<'tcx, Borrow>;
158+
159+
#[inline(always)]
160+
fn ecx(&self) -> &MiriEvalContext<'a, 'mir, 'tcx> {
161+
&self.ecx
162+
}
163+
164+
// Hook to detect `UnsafeCell`
165+
fn visit_value(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>
166+
{
167+
trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty);
168+
let is_unsafe_cell = match v.layout.ty.sty {
169+
ty::Adt(adt, _) => Some(adt.did) == self.ecx.tcx.lang_items().unsafe_cell_type(),
170+
_ => false,
171+
};
172+
if is_unsafe_cell {
173+
// We do not have to recurse further, this is an `UnsafeCell`.
174+
(self.unsafe_cell_action)(v)
175+
} else if self.ecx.type_is_freeze(v.layout.ty) {
176+
// This is `Freeze`, there cannot be an `UnsafeCell`
177+
Ok(())
178+
} else {
179+
// Proceed further
180+
self.walk_value(v)
181+
}
182+
}
183+
184+
// Make sure we visit aggregates in increasing offset order
185+
fn visit_aggregate(
186+
&mut self,
187+
place: MPlaceTy<'tcx, Borrow>,
188+
fields: impl Iterator<Item=EvalResult<'tcx, MPlaceTy<'tcx, Borrow>>>,
189+
) -> EvalResult<'tcx> {
190+
match place.layout.fields {
191+
layout::FieldPlacement::Array { .. } => {
192+
// For the array layout, we know the iterator will yield sorted elements so
193+
// we can avoid the allocation.
194+
self.walk_aggregate(place, fields)
195+
}
196+
layout::FieldPlacement::Arbitrary { .. } => {
197+
// Gather the subplaces and sort them before visiting.
198+
let mut places = fields.collect::<EvalResult<'tcx, Vec<MPlaceTy<'tcx, Borrow>>>>()?;
199+
places[..].sort_by_key(|place| place.ptr.get_ptr_offset(self.ecx()));
200+
self.walk_aggregate(place, places.into_iter().map(Ok))
201+
}
202+
layout::FieldPlacement::Union { .. } => {
203+
// Uh, what?
204+
bug!("A union is not an aggregate we should ever visit")
205+
}
206+
}
207+
}
208+
209+
// We have to do *something* for unions
210+
fn visit_union(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>
211+
{
212+
// With unions, we fall back to whatever the type says, to hopefully be consistent
213+
// with LLVM IR.
214+
// FIXME Are we consistent? And is this really the behavior we want?
215+
let frozen = self.ecx.type_is_freeze(v.layout.ty);
216+
if frozen {
217+
Ok(())
218+
} else {
219+
(self.unsafe_cell_action)(v)
220+
}
221+
}
222+
223+
// We should never get to a primitive, but always short-circuit somewhere above
224+
fn visit_primitive(&mut self, _val: ImmTy<'tcx, Borrow>) -> EvalResult<'tcx>
225+
{
226+
bug!("We should always short-circit before coming to a primitive")
227+
}
228+
}
229+
}
72230
}

src/lib.rs

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ use std::collections::HashMap;
1717
use std::borrow::Cow;
1818
use std::env;
1919

20-
use rustc::ty::{self, Ty, TyCtxt, query::TyCtxtAt};
20+
use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
2121
use rustc::ty::layout::{TyLayout, LayoutOf, Size};
2222
use rustc::hir::{self, def_id::DefId};
2323
use rustc::mir;
@@ -48,7 +48,7 @@ use crate::mono_hash_map::MonoHashMap;
4848
use crate::stacked_borrows::{EvalContextExt as StackedBorEvalContextExt};
4949

5050
// Used by priroda
51-
pub use crate::stacked_borrows::{Borrow, Stack, Stacks, Mut as MutBorrow, BorStackItem};
51+
pub use crate::stacked_borrows::{Borrow, Stack, Stacks, BorStackItem};
5252

5353
/// Insert rustc arguments at the beginning of the argument list that miri wants to be
5454
/// set per default, for maximal validation power.
@@ -476,38 +476,38 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> {
476476
#[inline(always)]
477477
fn tag_reference(
478478
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
479-
place: MemPlace<Borrow>,
480-
ty: Ty<'tcx>,
481-
size: Size,
479+
place: MPlaceTy<'tcx, Borrow>,
482480
mutability: Option<hir::Mutability>,
483-
) -> EvalResult<'tcx, MemPlace<Borrow>> {
481+
) -> EvalResult<'tcx, Scalar<Borrow>> {
482+
let (size, _) = ecx.size_and_align_of_mplace(place)?
483+
// for extern types, just cover what we can
484+
.unwrap_or_else(|| place.layout.size_and_align());
484485
if !ecx.machine.validate || size == Size::ZERO {
485486
// No tracking
486-
Ok(place)
487+
Ok(place.ptr)
487488
} else {
488489
let ptr = place.ptr.to_ptr()?;
489-
let tag = ecx.tag_reference(ptr, ty, size, mutability.into())?;
490-
let ptr = Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag));
491-
Ok(MemPlace { ptr, ..place })
490+
let tag = ecx.tag_reference(place, size, mutability.into())?;
491+
Ok(Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag)))
492492
}
493493
}
494494

495495
#[inline(always)]
496496
fn tag_dereference(
497497
ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
498-
place: MemPlace<Borrow>,
499-
ty: Ty<'tcx>,
500-
size: Size,
498+
place: MPlaceTy<'tcx, Borrow>,
501499
mutability: Option<hir::Mutability>,
502-
) -> EvalResult<'tcx, MemPlace<Borrow>> {
500+
) -> EvalResult<'tcx, Scalar<Borrow>> {
501+
let (size, _) = ecx.size_and_align_of_mplace(place)?
502+
// for extern types, just cover what we can
503+
.unwrap_or_else(|| place.layout.size_and_align());
503504
if !ecx.machine.validate || size == Size::ZERO {
504505
// No tracking
505-
Ok(place)
506+
Ok(place.ptr)
506507
} else {
507508
let ptr = place.ptr.to_ptr()?;
508-
let tag = ecx.tag_dereference(ptr, ty, size, mutability.into())?;
509-
let ptr = Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag));
510-
Ok(MemPlace { ptr, ..place })
509+
let tag = ecx.tag_dereference(place, size, mutability.into())?;
510+
Ok(Scalar::Ptr(Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag)))
511511
}
512512
}
513513

0 commit comments

Comments
 (0)