|
1 | 1 | use std::mem;
|
2 | 2 |
|
3 |
| -use rustc::ty; |
| 3 | +use rustc::ty::{self, layout}; |
4 | 4 | use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};
|
5 | 5 |
|
6 | 6 | use crate::*;
|
@@ -32,6 +32,15 @@ impl<Tag> ScalarExt for ScalarMaybeUndef<Tag> {
|
32 | 32 |
|
pub trait EvalContextExt<'tcx> {
    /// Resolve a path (e.g. `["std", "env", "var"]`) to a `ty::Instance`.
    /// Fails with `EvalErrorKind::PathNotFound` if no such item exists.
    fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;

    /// Visit the memory covered by `place` that is frozen -- i.e., NOT
    /// what is inside an `UnsafeCell`.
    fn visit_frozen(
        &self,
        place: MPlaceTy<'tcx, Borrow>,
        size: Size,
        action: impl FnMut(Pointer<Borrow>, Size) -> EvalResult<'tcx>,
    ) -> EvalResult<'tcx>;
}
|
36 | 45 |
|
37 | 46 |
|
@@ -69,4 +78,153 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
|
69 | 78 | EvalErrorKind::PathNotFound(path).into()
|
70 | 79 | })
|
71 | 80 | }
|
| 81 | + |
| 82 | + /// Visit the memory covered by `place` that is frozen -- i.e., NOT |
| 83 | + /// what is inside an `UnsafeCell`. |
| 84 | + fn visit_frozen( |
| 85 | + &self, |
| 86 | + place: MPlaceTy<'tcx, Borrow>, |
| 87 | + size: Size, |
| 88 | + mut frozen_action: impl FnMut(Pointer<Borrow>, Size) -> EvalResult<'tcx>, |
| 89 | + ) -> EvalResult<'tcx> { |
| 90 | + trace!("visit_frozen(place={:?}, size={:?})", *place, size); |
| 91 | + debug_assert_eq!(size, |
| 92 | + self.size_and_align_of_mplace(place)? |
| 93 | + .map(|(size, _)| size) |
| 94 | + .unwrap_or_else(|| place.layout.size) |
| 95 | + ); |
| 96 | + // Store how far we proceeded into the place so far. Everything to the left of |
| 97 | + // this offset has already been handled, in the sense that the frozen parts |
| 98 | + // have had `action` called on them. |
| 99 | + let mut end_ptr = place.ptr; |
| 100 | + // Called when we detected an `UnsafeCell` at the given offset and size. |
| 101 | + // Calls `action` and advances `end_ptr`. |
| 102 | + let mut unsafe_cell_action = |unsafe_cell_offset, unsafe_cell_size| { |
| 103 | + // We assume that we are given the fields in increasing offset order, |
| 104 | + // and nothing else changes. |
| 105 | + let end_offset = end_ptr.get_ptr_offset(self); |
| 106 | + assert!(unsafe_cell_offset >= end_offset); |
| 107 | + let frozen_size = unsafe_cell_offset - end_offset; |
| 108 | + // Everything between the end_ptr and this `UnsafeCell` is frozen. |
| 109 | + if frozen_size != Size::ZERO { |
| 110 | + frozen_action(end_ptr.to_ptr()?, frozen_size)?; |
| 111 | + } |
| 112 | + // Update end end_ptr. |
| 113 | + end_ptr = end_ptr.ptr_wrapping_offset(frozen_size+unsafe_cell_size, self); |
| 114 | + // Done |
| 115 | + Ok(()) |
| 116 | + }; |
| 117 | + // Run a visitor |
| 118 | + { |
| 119 | + let mut visitor = UnsafeCellVisitor { |
| 120 | + ecx: self, |
| 121 | + unsafe_cell_action: |place| { |
| 122 | + trace!("unsafe_cell_action on {:?}", place.ptr); |
| 123 | + // We need a size to go on. |
| 124 | + let (unsafe_cell_size, _) = self.size_and_align_of_mplace(place)? |
| 125 | + // for extern types, just cover what we can |
| 126 | + .unwrap_or_else(|| place.layout.size_and_align()); |
| 127 | + // Now handle this `UnsafeCell`, unless it is empty. |
| 128 | + if unsafe_cell_size != Size::ZERO { |
| 129 | + unsafe_cell_action(place.ptr.get_ptr_offset(self), unsafe_cell_size) |
| 130 | + } else { |
| 131 | + Ok(()) |
| 132 | + } |
| 133 | + }, |
| 134 | + }; |
| 135 | + visitor.visit_value(place)?; |
| 136 | + } |
| 137 | + // The part between the end_ptr and the end of the place is also frozen. |
| 138 | + // So pretend there is a 0-sized `UnsafeCell` at the end. |
| 139 | + unsafe_cell_action(place.ptr.get_ptr_offset(self) + size, Size::ZERO)?; |
| 140 | + // Done! |
| 141 | + return Ok(()); |
| 142 | + |
| 143 | + /// Visiting the memory covered by a `MemPlace`, being aware of |
| 144 | + /// whether we are inside an `UnsafeCell` or not. |
| 145 | + struct UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> |
| 146 | + where F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> |
| 147 | + { |
| 148 | + ecx: &'ecx MiriEvalContext<'a, 'mir, 'tcx>, |
| 149 | + unsafe_cell_action: F, |
| 150 | + } |
| 151 | + |
| 152 | + impl<'ecx, 'a, 'mir, 'tcx, F> ValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>> |
| 153 | + for UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> |
| 154 | + where |
| 155 | + F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> |
| 156 | + { |
| 157 | + type V = MPlaceTy<'tcx, Borrow>; |
| 158 | + |
| 159 | + #[inline(always)] |
| 160 | + fn ecx(&self) -> &MiriEvalContext<'a, 'mir, 'tcx> { |
| 161 | + &self.ecx |
| 162 | + } |
| 163 | + |
| 164 | + // Hook to detect `UnsafeCell` |
| 165 | + fn visit_value(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> |
| 166 | + { |
| 167 | + trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty); |
| 168 | + let is_unsafe_cell = match v.layout.ty.sty { |
| 169 | + ty::Adt(adt, _) => Some(adt.did) == self.ecx.tcx.lang_items().unsafe_cell_type(), |
| 170 | + _ => false, |
| 171 | + }; |
| 172 | + if is_unsafe_cell { |
| 173 | + // We do not have to recurse further, this is an `UnsafeCell`. |
| 174 | + (self.unsafe_cell_action)(v) |
| 175 | + } else if self.ecx.type_is_freeze(v.layout.ty) { |
| 176 | + // This is `Freeze`, there cannot be an `UnsafeCell` |
| 177 | + Ok(()) |
| 178 | + } else { |
| 179 | + // Proceed further |
| 180 | + self.walk_value(v) |
| 181 | + } |
| 182 | + } |
| 183 | + |
| 184 | + // Make sure we visit aggregrates in increasing offset order |
| 185 | + fn visit_aggregate( |
| 186 | + &mut self, |
| 187 | + place: MPlaceTy<'tcx, Borrow>, |
| 188 | + fields: impl Iterator<Item=EvalResult<'tcx, MPlaceTy<'tcx, Borrow>>>, |
| 189 | + ) -> EvalResult<'tcx> { |
| 190 | + match place.layout.fields { |
| 191 | + layout::FieldPlacement::Array { .. } => { |
| 192 | + // For the array layout, we know the iterator will yield sorted elements so |
| 193 | + // we can avoid the allocation. |
| 194 | + self.walk_aggregate(place, fields) |
| 195 | + } |
| 196 | + layout::FieldPlacement::Arbitrary { .. } => { |
| 197 | + // Gather the subplaces and sort them before visiting. |
| 198 | + let mut places = fields.collect::<EvalResult<'tcx, Vec<MPlaceTy<'tcx, Borrow>>>>()?; |
| 199 | + places[..].sort_by_key(|place| place.ptr.get_ptr_offset(self.ecx())); |
| 200 | + self.walk_aggregate(place, places.into_iter().map(Ok)) |
| 201 | + } |
| 202 | + layout::FieldPlacement::Union { .. } => { |
| 203 | + // Uh, what? |
| 204 | + bug!("A union is not an aggregate we should ever visit") |
| 205 | + } |
| 206 | + } |
| 207 | + } |
| 208 | + |
| 209 | + // We have to do *something* for unions |
| 210 | + fn visit_union(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> |
| 211 | + { |
| 212 | + // With unions, we fall back to whatever the type says, to hopefully be consistent |
| 213 | + // with LLVM IR. |
| 214 | + // FIXME Are we consistent? And is this really the behavior we want? |
| 215 | + let frozen = self.ecx.type_is_freeze(v.layout.ty); |
| 216 | + if frozen { |
| 217 | + Ok(()) |
| 218 | + } else { |
| 219 | + (self.unsafe_cell_action)(v) |
| 220 | + } |
| 221 | + } |
| 222 | + |
| 223 | + // We should never get to a primitive, but always short-circuit somewhere above |
| 224 | + fn visit_primitive(&mut self, _val: ImmTy<'tcx, Borrow>) -> EvalResult<'tcx> |
| 225 | + { |
| 226 | + bug!("We should always short-circit before coming to a primitive") |
| 227 | + } |
| 228 | + } |
| 229 | + } |
72 | 230 | }
|
0 commit comments