
Commit 945f007

Auto merge of #779 - christianpoveda:intptrcast-model, r=RalfJung,oli-obk
Implement intptrcast methods cc #224
2 parents: d1873b6 + 7fbf8e5

10 files changed: +262 -39 lines

src/fn_call.rs

Lines changed: 1 addition & 1 deletion
@@ -980,7 +980,7 @@ fn gen_random<'mir, 'tcx>(
    }
    let ptr = dest.to_ptr()?;

-   let data = match &mut this.machine.rng {
+   let data = match &mut this.memory_mut().extra.rng {
        Some(rng) => {
            let mut data = vec![0; len];
            rng.fill_bytes(&mut data);

src/intptrcast.rs

Lines changed: 100 additions & 0 deletions
@@ -0,0 +1,100 @@
use std::cell::{Cell, RefCell};

use rustc::mir::interpret::{AllocId, Pointer, InterpResult};
use rustc_mir::interpret::Memory;
use rustc_target::abi::Size;

use crate::stacked_borrows::Tag;
use crate::Evaluator;

pub type MemoryExtra = RefCell<GlobalState>;

#[derive(Clone, Debug, Default)]
pub struct AllocExtra {
    base_addr: Cell<Option<u64>>
}

#[derive(Clone, Debug)]
pub struct GlobalState {
    /// This is used as a map between the address of each allocation and its `AllocId`.
    /// It is always sorted.
    pub int_to_ptr_map: Vec<(u64, AllocId)>,
    /// This is used as a memory address when a new pointer is cast to an integer. It
    /// is always larger than any address that was previously made part of a block.
    pub next_base_addr: u64,
}

impl Default for GlobalState {
    // FIXME: Query the page size in the future
    fn default() -> Self {
        GlobalState {
            int_to_ptr_map: Vec::default(),
            next_base_addr: 2u64.pow(16)
        }
    }
}

impl<'mir, 'tcx> GlobalState {
    pub fn int_to_ptr(
        int: u64,
        memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
    ) -> InterpResult<'tcx, Pointer<Tag>> {
        let global_state = memory.extra.intptrcast.borrow();

        match global_state.int_to_ptr_map.binary_search_by_key(&int, |(addr, _)| *addr) {
            Ok(pos) => {
                let (_, alloc_id) = global_state.int_to_ptr_map[pos];
                // `int` is equal to the starting address of an allocation, so the offset is
                // zero. The pointer is untagged because it was created from a cast.
                Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(0), Tag::Untagged))
            },
            Err(0) => err!(DanglingPointerDeref),
            Err(pos) => {
                // This is the largest of the addresses smaller than `int`,
                // i.e. the greatest lower bound (glb).
                let (glb, alloc_id) = global_state.int_to_ptr_map[pos - 1];
                // This never overflows because `int >= glb`.
                let offset = int - glb;
                // If the offset exceeds the size of the allocation, this access is illegal.
                if offset <= memory.get(alloc_id)?.bytes.len() as u64 {
                    // This pointer is untagged because it was created from a cast.
                    Ok(Pointer::new_with_tag(alloc_id, Size::from_bytes(offset), Tag::Untagged))
                } else {
                    err!(DanglingPointerDeref)
                }
            }
        }
    }

    pub fn ptr_to_int(
        ptr: Pointer<Tag>,
        memory: &Memory<'mir, 'tcx, Evaluator<'tcx>>,
    ) -> InterpResult<'tcx, u64> {
        let mut global_state = memory.extra.intptrcast.borrow_mut();

        let alloc = memory.get(ptr.alloc_id)?;

        let base_addr = match alloc.extra.intptrcast.base_addr.get() {
            Some(base_addr) => base_addr,
            None => {
                // This allocation does not have a base address yet, pick one.
                let base_addr = Self::align_addr(global_state.next_base_addr, alloc.align.bytes());
                global_state.next_base_addr = base_addr + alloc.bytes.len() as u64;
                alloc.extra.intptrcast.base_addr.set(Some(base_addr));
                // Given that `next_base_addr` increases with each allocation, pushing the
                // corresponding tuple keeps `int_to_ptr_map` sorted.
                global_state.int_to_ptr_map.push((base_addr, ptr.alloc_id));

                base_addr
            }
        };

        Ok(base_addr + ptr.offset.bytes())
    }

    /// Shifts `addr` to make it aligned with `align` by rounding `addr` to the smallest multiple
    /// of `align` that is strictly larger than `addr`.
    fn align_addr(addr: u64, align: u64) -> u64 {
        addr + align - addr % align
    }
}
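For reference, here is a standalone sketch (not part of the commit) of the lookup strategy `int_to_ptr` uses: keep the (base address, allocation) pairs sorted, binary-search for the greatest base address at or below the integer, and treat the remainder as the offset. `ToyState`, the plain `usize` allocation ids, and the `sizes` table are invented stand-ins for Miri's real types.

// Sketch only: plain integers stand in for `AllocId` and allocation sizes.
struct ToyState {
    /// (base_addr, alloc_id) pairs, kept sorted by base address.
    int_to_ptr_map: Vec<(u64, usize)>,
    /// Size of each allocation, indexed by alloc_id.
    sizes: Vec<u64>,
}

impl ToyState {
    /// Round `addr` up to the next multiple of `align` that is strictly larger,
    /// mirroring `align_addr` above.
    fn align_addr(addr: u64, align: u64) -> u64 {
        addr + align - addr % align
    }

    /// Resolve an integer address to (alloc_id, offset), like `int_to_ptr`.
    fn int_to_ptr(&self, int: u64) -> Option<(usize, u64)> {
        match self.int_to_ptr_map.binary_search_by_key(&int, |&(addr, _)| addr) {
            // `int` is exactly the base address of some allocation: offset 0.
            Ok(pos) => Some((self.int_to_ptr_map[pos].1, 0)),
            // No allocation starts at or below `int`: dangling.
            Err(0) => None,
            // Otherwise the candidate is the allocation whose base address is the
            // greatest lower bound of `int`.
            Err(pos) => {
                let (glb, alloc_id) = self.int_to_ptr_map[pos - 1];
                let offset = int - glb;
                if offset <= self.sizes[alloc_id] {
                    Some((alloc_id, offset))
                } else {
                    None // past the end of that allocation: dangling
                }
            }
        }
    }
}

fn main() {
    let state = ToyState {
        int_to_ptr_map: vec![(0x10000, 0), (0x10040, 1)],
        sizes: vec![64, 16],
    };
    assert_eq!(state.int_to_ptr(0x10008), Some((0, 8)));
    assert_eq!(state.int_to_ptr(0x10044), Some((1, 4)));
    assert_eq!(state.int_to_ptr(0x1), None);
    assert_eq!(ToyState::align_addr(0x10040, 16), 0x10050);
}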

src/lib.rs

Lines changed: 45 additions & 17 deletions
@@ -20,6 +20,8 @@ mod tls;
mod range_map;
mod mono_hash_map;
mod stacked_borrows;
+mod intptrcast;
+mod memory;

use std::collections::HashMap;
use std::borrow::Cow;

@@ -48,6 +50,7 @@ use crate::range_map::RangeMap;
pub use crate::helpers::{EvalContextExt as HelpersEvalContextExt};
use crate::mono_hash_map::MonoHashMap;
pub use crate::stacked_borrows::{EvalContextExt as StackedBorEvalContextExt};
+use crate::memory::AllocExtra;

// Used by priroda.
pub use crate::stacked_borrows::{Tag, Permission, Stack, Stacks, Item};

@@ -79,9 +82,12 @@ pub fn create_ecx<'mir, 'tcx: 'mir>(
    let mut ecx = InterpretCx::new(
        tcx.at(syntax::source_map::DUMMY_SP),
        ty::ParamEnv::reveal_all(),
-       Evaluator::new(config.validate, config.seed),
+       Evaluator::new(config.validate),
    );

+   // FIXME: InterpretCx::new should take an initial MemoryExtra
+   ecx.memory_mut().extra.rng = config.seed.map(StdRng::seed_from_u64);
+
    let main_instance = ty::Instance::mono(ecx.tcx.tcx, main_id);
    let main_mir = ecx.load_mir(main_instance.def)?;

@@ -205,7 +211,7 @@ pub fn create_ecx<'mir, 'tcx: 'mir>(
            cur_ptr = cur_ptr.offset(char_size, tcx)?;
        }
    }
-
+
    assert!(args.next().is_none(), "start lang item has more arguments than expected");

    Ok(ecx)

@@ -341,14 +347,10 @@ pub struct Evaluator<'tcx> {

    /// Whether to enforce the validity invariant.
    pub(crate) validate: bool,
-
-   /// The random number generator to use if Miri
-   /// is running in non-deterministic mode
-   pub(crate) rng: Option<StdRng>
}

impl<'tcx> Evaluator<'tcx> {
-   fn new(validate: bool, seed: Option<u64>) -> Self {
+   fn new(validate: bool) -> Self {
        Evaluator {
            env_vars: HashMap::default(),
            argc: None,

@@ -357,7 +359,6 @@ impl<'tcx> Evaluator<'tcx> {
            last_error: 0,
            tls: TlsData::default(),
            validate,
-           rng: seed.map(|s| StdRng::seed_from_u64(s))
        }
    }
}

@@ -386,8 +387,8 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
    type MemoryKinds = MiriMemoryKind;

    type FrameExtra = stacked_borrows::CallId;
-   type MemoryExtra = stacked_borrows::MemoryState;
-   type AllocExtra = stacked_borrows::Stacks;
+   type MemoryExtra = memory::MemoryExtra;
+   type AllocExtra = memory::AllocExtra;
    type PointerTag = Tag;

    type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;

@@ -512,17 +513,17 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
    ) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
        let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
        let alloc = alloc.into_owned();
-       let (extra, base_tag) = Stacks::new_allocation(
+       let (stacks, base_tag) = Stacks::new_allocation(
            id,
            Size::from_bytes(alloc.bytes.len() as u64),
-           Rc::clone(&memory.extra),
+           Rc::clone(&memory.extra.stacked_borrows),
            kind,
        );
        if kind != MiriMemoryKind::Static.into() {
            assert!(alloc.relocations.is_empty(), "Only statics can come initialized with inner pointers");
            // Now we can rely on the inner pointers being static, too.
        }
-       let mut memory_extra = memory.extra.borrow_mut();
+       let mut memory_extra = memory.extra.stacked_borrows.borrow_mut();
        let alloc: Allocation<Tag, Self::AllocExtra> = Allocation {
            bytes: alloc.bytes,
            relocations: Relocations::from_presorted(

@@ -535,7 +536,10 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
            undef_mask: alloc.undef_mask,
            align: alloc.align,
            mutability: alloc.mutability,
-           extra,
+           extra: AllocExtra {
+               stacked_borrows: stacks,
+               intptrcast: Default::default(),
+           },
        };
        (Cow::Owned(alloc), base_tag)
    }

@@ -545,7 +549,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
        id: AllocId,
        memory: &Memory<'mir, 'tcx, Self>,
    ) -> Self::PointerTag {
-       memory.extra.borrow_mut().static_base_ptr(id)
+       memory.extra.stacked_borrows.borrow_mut().static_base_ptr(id)
    }

    #[inline(always)]

@@ -570,14 +574,38 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
    fn stack_push(
        ecx: &mut InterpretCx<'mir, 'tcx, Self>,
    ) -> InterpResult<'tcx, stacked_borrows::CallId> {
-       Ok(ecx.memory().extra.borrow_mut().new_call())
+       Ok(ecx.memory().extra.stacked_borrows.borrow_mut().new_call())
    }

    #[inline(always)]
    fn stack_pop(
        ecx: &mut InterpretCx<'mir, 'tcx, Self>,
        extra: stacked_borrows::CallId,
    ) -> InterpResult<'tcx> {
-       Ok(ecx.memory().extra.borrow_mut().end_call(extra))
+       Ok(ecx.memory().extra.stacked_borrows.borrow_mut().end_call(extra))
+   }
+
+   fn int_to_ptr(
+       int: u64,
+       memory: &Memory<'mir, 'tcx, Self>,
+   ) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
+       if int == 0 {
+           err!(InvalidNullPointerUsage)
+       } else if memory.extra.rng.is_none() {
+           err!(ReadBytesAsPointer)
+       } else {
+           intptrcast::GlobalState::int_to_ptr(int, memory)
+       }
+   }
+
+   fn ptr_to_int(
+       ptr: Pointer<Self::PointerTag>,
+       memory: &Memory<'mir, 'tcx, Self>,
+   ) -> InterpResult<'tcx, u64> {
+       if memory.extra.rng.is_none() {
+           err!(ReadPointerAsBytes)
+       } else {
+           intptrcast::GlobalState::ptr_to_int(ptr, memory)
+       }
    }
}
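As a quick illustration of the gating these two new hooks implement, here is a minimal sketch with invented stand-ins (`CastError`, `lookup_addr`) in place of Miri's error machinery and `GlobalState`: a null integer is rejected outright, casts are refused while intptrcast is disabled (no RNG/seed configured), and anything else is deferred to the global lookup.

// Sketch only; `CastError` and `lookup_addr` are hypothetical stand-ins.
#[derive(Debug, PartialEq)]
enum CastError {
    InvalidNullPointerUsage,
    ReadBytesAsPointer,
}

// Stand-in for intptrcast::GlobalState::int_to_ptr: returns (alloc_id, offset).
fn lookup_addr(int: u64) -> Result<(usize, u64), CastError> {
    Ok((0, int))
}

fn int_to_ptr(int: u64, intptrcast_enabled: bool) -> Result<(usize, u64), CastError> {
    if int == 0 {
        // Casting 0 can never yield a valid pointer.
        Err(CastError::InvalidNullPointerUsage)
    } else if !intptrcast_enabled {
        // Without an RNG/seed, Miri stays fully deterministic and refuses the cast.
        Err(CastError::ReadBytesAsPointer)
    } else {
        lookup_addr(int)
    }
}

fn main() {
    assert_eq!(int_to_ptr(0, true), Err(CastError::InvalidNullPointerUsage));
    assert_eq!(int_to_ptr(0x1000, false), Err(CastError::ReadBytesAsPointer));
    assert!(int_to_ptr(0x1000, true).is_ok());
}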

src/memory.rs

Lines changed: 51 additions & 0 deletions
@@ -0,0 +1,51 @@
use rand::rngs::StdRng;

use rustc_mir::interpret::{Pointer, Allocation, AllocationExtra, InterpResult};
use rustc_target::abi::Size;

use crate::{stacked_borrows, intptrcast};
use crate::stacked_borrows::Tag;

#[derive(Default, Clone, Debug)]
pub struct MemoryExtra {
    pub stacked_borrows: stacked_borrows::MemoryExtra,
    pub intptrcast: intptrcast::MemoryExtra,
    /// The random number generator to use if Miri is running in non-deterministic mode and to
    /// enable intptrcast
    pub(crate) rng: Option<StdRng>
}

#[derive(Debug, Clone)]
pub struct AllocExtra {
    pub stacked_borrows: stacked_borrows::AllocExtra,
    pub intptrcast: intptrcast::AllocExtra,
}

impl AllocationExtra<Tag> for AllocExtra {
    #[inline(always)]
    fn memory_read<'tcx>(
        alloc: &Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        alloc.extra.stacked_borrows.memory_read(ptr, size)
    }

    #[inline(always)]
    fn memory_written<'tcx>(
        alloc: &mut Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        alloc.extra.stacked_borrows.memory_written(ptr, size)
    }

    #[inline(always)]
    fn memory_deallocated<'tcx>(
        alloc: &mut Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        alloc.extra.stacked_borrows.memory_deallocated(ptr, size)
    }
}
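The shape of this new module is plain composition: the combined extras own both the Stacked Borrows state and the intptrcast state, and each access hook is forwarded to the component that actually reacts to it (the intptrcast part only caches a base address). A toy version of that shape, with invented types (`BorrowState`, `AddrState`, `CombinedAllocExtra`) rather than Miri's:

// Sketch only: invented types illustrating the delegation pattern above.
use std::cell::Cell;

#[derive(Debug, Default)]
struct BorrowState {
    // Stands in for stacked_borrows::AllocExtra; here we just count reads.
    reads: Cell<u64>,
}

impl BorrowState {
    fn memory_read(&self, _offset: u64, _size: u64) -> Result<(), String> {
        // The real implementation walks the borrow stack; the sketch only counts.
        self.reads.set(self.reads.get() + 1);
        Ok(())
    }
}

#[derive(Debug, Default)]
struct AddrState {
    // Stands in for intptrcast::AllocExtra: a lazily assigned base address.
    base_addr: Cell<Option<u64>>,
}

#[derive(Debug, Default)]
struct CombinedAllocExtra {
    stacked_borrows: BorrowState,
    intptrcast: AddrState,
}

impl CombinedAllocExtra {
    // Mirrors AllocationExtra::memory_read: delegate to the borrow tracker;
    // the intptrcast state is passive and never needs to observe accesses.
    fn memory_read(&self, offset: u64, size: u64) -> Result<(), String> {
        self.stacked_borrows.memory_read(offset, size)
    }
}

fn main() {
    let extra = CombinedAllocExtra::default();
    extra.memory_read(0, 8).unwrap();
    assert_eq!(extra.stacked_borrows.reads.get(), 1);
    assert!(extra.intptrcast.base_addr.get().is_none());
}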

src/operator.rs

Lines changed: 13 additions & 0 deletions
@@ -56,6 +56,19 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {

        trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);

+       // If intptrcast is enabled and the operation is not an offset,
+       // we can force the cast from pointers to integer addresses and
+       // then dispatch to rustc's binary operation method.
+       if self.memory().extra.rng.is_some() && bin_op != Offset {
+           let l_bits = self.force_bits(left.imm.to_scalar()?, left.layout.size)?;
+           let r_bits = self.force_bits(right.imm.to_scalar()?, right.layout.size)?;
+
+           let left = ImmTy::from_scalar(Scalar::from_uint(l_bits, left.layout.size), left.layout);
+           let right = ImmTy::from_scalar(Scalar::from_uint(r_bits, left.layout.size), right.layout);
+
+           return self.binary_op(bin_op, left, right);
+       }
+
        // Operations that support fat pointers
        match bin_op {
            Eq | Ne => {
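The idea behind the inserted block, shown as a standalone sketch with invented stand-ins (`Operand`, `force_addr`) rather than Miri's `ImmTy`/`force_bits`: when intptrcast is available, a non-`Offset` binary operation involving pointers can be evaluated by first forcing both operands to integer addresses and then running the ordinary integer operation.

// Sketch only: `Operand` and `force_addr` are hypothetical stand-ins.
#[derive(Clone, Copy)]
enum Operand {
    Int(u64),
    Ptr { base_addr: u64, offset: u64 },
}

// Force an operand down to a plain integer address; in Miri this is where
// `force_bits`/`ptr_to_int` would lazily assign a base address to the allocation.
fn force_addr(op: Operand) -> u64 {
    match op {
        Operand::Int(v) => v,
        Operand::Ptr { base_addr, offset } => base_addr + offset,
    }
}

// A comparison other than `Offset`: cast both sides, then dispatch to the
// ordinary integer comparison.
fn less_than(left: Operand, right: Operand) -> bool {
    force_addr(left) < force_addr(right)
}

fn main() {
    let a = Operand::Ptr { base_addr: 0x10000, offset: 8 };
    let b = Operand::Int(0x10010);
    assert!(less_than(a, b));
}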
