Skip to content

Commit b9aecc8

Browse files
authored
Merge pull request #4018 from YohDeadfall/ecx-name-standardization
Standardized variable names for InterpCx
2 parents 052bdcb + 886c419 commit b9aecc8

File tree

1 file changed

+36
-36
lines changed

1 file changed

+36
-36
lines changed

src/alloc_addresses/mod.rs

Lines changed: 36 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -111,8 +111,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
111111
// Returns the exposed `AllocId` that corresponds to the specified addr,
112112
// or `None` if the addr is out of bounds
113113
fn alloc_id_from_addr(&self, addr: u64, size: i64) -> Option<AllocId> {
114-
let ecx = self.eval_context_ref();
115-
let global_state = ecx.machine.alloc_addresses.borrow();
114+
let this = self.eval_context_ref();
115+
let global_state = this.machine.alloc_addresses.borrow();
116116
assert!(global_state.provenance_mode != ProvenanceMode::Strict);
117117

118118
// We always search the allocation to the right of this address. So if the size is strictly
@@ -134,15 +134,15 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
134134
// entered for addresses that are not the base address, so even zero-sized
135135
// allocations will get recognized at their base address -- but all other
136136
// allocations will *not* be recognized at their "end" address.
137-
let size = ecx.get_alloc_info(alloc_id).0;
137+
let size = this.get_alloc_info(alloc_id).0;
138138
if offset < size.bytes() { Some(alloc_id) } else { None }
139139
}
140140
}?;
141141

142142
// We only use this provenance if it has been exposed.
143143
if global_state.exposed.contains(&alloc_id) {
144144
// This must still be live, since we remove allocations from `int_to_ptr_map` when they get freed.
145-
debug_assert!(ecx.is_alloc_live(alloc_id));
145+
debug_assert!(this.is_alloc_live(alloc_id));
146146
Some(alloc_id)
147147
} else {
148148
None
@@ -155,9 +155,9 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
155155
alloc_id: AllocId,
156156
memory_kind: MemoryKind,
157157
) -> InterpResult<'tcx, u64> {
158-
let ecx = self.eval_context_ref();
159-
let mut rng = ecx.machine.rng.borrow_mut();
160-
let (size, align, kind) = ecx.get_alloc_info(alloc_id);
158+
let this = self.eval_context_ref();
159+
let mut rng = this.machine.rng.borrow_mut();
160+
let (size, align, kind) = this.get_alloc_info(alloc_id);
161161
// This is either called immediately after allocation (and then cached), or when
162162
// adjusting `tcx` pointers (which never get freed). So assert that we are looking
163163
// at a live allocation. This also ensures that we never re-assign an address to an
@@ -166,12 +166,12 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
166166
assert!(!matches!(kind, AllocKind::Dead));
167167

168168
// This allocation does not have a base address yet, pick or reuse one.
169-
if ecx.machine.native_lib.is_some() {
169+
if this.machine.native_lib.is_some() {
170170
// In native lib mode, we use the "real" address of the bytes for this allocation.
171171
// This ensures the interpreted program and native code have the same view of memory.
172172
let base_ptr = match kind {
173173
AllocKind::LiveData => {
174-
if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
174+
if this.tcx.try_get_global_alloc(alloc_id).is_some() {
175175
// For new global allocations, we always pre-allocate the memory to be able to use the machine address directly.
176176
let prepared_bytes = MiriAllocBytes::zeroed(size, align)
177177
.unwrap_or_else(|| {
@@ -185,7 +185,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
185185
.unwrap();
186186
ptr
187187
} else {
188-
ecx.get_alloc_bytes_unchecked_raw(alloc_id)?
188+
this.get_alloc_bytes_unchecked_raw(alloc_id)?
189189
}
190190
}
191191
AllocKind::Function | AllocKind::VTable => {
@@ -204,10 +204,10 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
204204
}
205205
// We are not in native lib mode, so we control the addresses ourselves.
206206
if let Some((reuse_addr, clock)) =
207-
global_state.reuse.take_addr(&mut *rng, size, align, memory_kind, ecx.active_thread())
207+
global_state.reuse.take_addr(&mut *rng, size, align, memory_kind, this.active_thread())
208208
{
209209
if let Some(clock) = clock {
210-
ecx.acquire_clock(&clock);
210+
this.acquire_clock(&clock);
211211
}
212212
interp_ok(reuse_addr)
213213
} else {
@@ -230,7 +230,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
230230
.checked_add(max(size.bytes(), 1))
231231
.ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
232232
// Even if `Size` didn't overflow, we might still have filled up the address space.
233-
if global_state.next_base_addr > ecx.target_usize_max() {
233+
if global_state.next_base_addr > this.target_usize_max() {
234234
throw_exhaust!(AddressSpaceFull);
235235
}
236236

@@ -243,8 +243,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
243243
alloc_id: AllocId,
244244
memory_kind: MemoryKind,
245245
) -> InterpResult<'tcx, u64> {
246-
let ecx = self.eval_context_ref();
247-
let mut global_state = ecx.machine.alloc_addresses.borrow_mut();
246+
let this = self.eval_context_ref();
247+
let mut global_state = this.machine.alloc_addresses.borrow_mut();
248248
let global_state = &mut *global_state;
249249

250250
match global_state.base_addr.get(&alloc_id) {
@@ -283,31 +283,31 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
283283
impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
284284
pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
285285
fn expose_ptr(&mut self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
286-
let ecx = self.eval_context_mut();
287-
let global_state = ecx.machine.alloc_addresses.get_mut();
286+
let this = self.eval_context_mut();
287+
let global_state = this.machine.alloc_addresses.get_mut();
288288
// In strict mode, we don't need this, so we can save some cycles by not tracking it.
289289
if global_state.provenance_mode == ProvenanceMode::Strict {
290290
return interp_ok(());
291291
}
292292
// Exposing a dead alloc is a no-op, because it's not possible to get a dead allocation
293293
// via int2ptr.
294-
if !ecx.is_alloc_live(alloc_id) {
294+
if !this.is_alloc_live(alloc_id) {
295295
return interp_ok(());
296296
}
297297
trace!("Exposing allocation id {alloc_id:?}");
298-
let global_state = ecx.machine.alloc_addresses.get_mut();
298+
let global_state = this.machine.alloc_addresses.get_mut();
299299
global_state.exposed.insert(alloc_id);
300-
if ecx.machine.borrow_tracker.is_some() {
301-
ecx.expose_tag(alloc_id, tag)?;
300+
if this.machine.borrow_tracker.is_some() {
301+
this.expose_tag(alloc_id, tag)?;
302302
}
303303
interp_ok(())
304304
}
305305

306306
fn ptr_from_addr_cast(&self, addr: u64) -> InterpResult<'tcx, Pointer> {
307307
trace!("Casting {:#x} to a pointer", addr);
308308

309-
let ecx = self.eval_context_ref();
310-
let global_state = ecx.machine.alloc_addresses.borrow();
309+
let this = self.eval_context_ref();
310+
let global_state = this.machine.alloc_addresses.borrow();
311311

312312
// Potentially emit a warning.
313313
match global_state.provenance_mode {
@@ -319,9 +319,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
319319
}
320320
PAST_WARNINGS.with_borrow_mut(|past_warnings| {
321321
let first = past_warnings.is_empty();
322-
if past_warnings.insert(ecx.cur_span()) {
322+
if past_warnings.insert(this.cur_span()) {
323323
// Newly inserted, so first time we see this span.
324-
ecx.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
324+
this.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
325325
}
326326
});
327327
}
@@ -347,19 +347,19 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
347347
tag: BorTag,
348348
kind: MemoryKind,
349349
) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
350-
let ecx = self.eval_context_ref();
350+
let this = self.eval_context_ref();
351351

352352
let (prov, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
353353
let alloc_id = prov.alloc_id();
354354

355355
// Get a pointer to the beginning of this allocation.
356-
let base_addr = ecx.addr_from_alloc_id(alloc_id, kind)?;
356+
let base_addr = this.addr_from_alloc_id(alloc_id, kind)?;
357357
let base_ptr = interpret::Pointer::new(
358358
Provenance::Concrete { alloc_id, tag },
359359
Size::from_bytes(base_addr),
360360
);
361361
// Add offset with the right kind of pointer-overflowing arithmetic.
362-
interp_ok(base_ptr.wrapping_offset(offset, ecx))
362+
interp_ok(base_ptr.wrapping_offset(offset, this))
363363
}
364364

365365
// This returns some prepared `MiriAllocBytes`, either because `addr_from_alloc_id` reserved
@@ -371,16 +371,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
371371
bytes: &[u8],
372372
align: Align,
373373
) -> InterpResult<'tcx, MiriAllocBytes> {
374-
let ecx = self.eval_context_ref();
375-
if ecx.machine.native_lib.is_some() {
374+
let this = self.eval_context_ref();
375+
if this.machine.native_lib.is_some() {
376376
// In native lib mode, MiriAllocBytes for global allocations are handled via `prepared_alloc_bytes`.
377377
// This additional call ensures that some `MiriAllocBytes` are always prepared, just in case
378378
// this function gets called before the first time `addr_from_alloc_id` gets called.
379-
ecx.addr_from_alloc_id(id, kind)?;
379+
this.addr_from_alloc_id(id, kind)?;
380380
// The memory we need here will have already been allocated during an earlier call to
381381
// `addr_from_alloc_id` for this allocation. So don't create a new `MiriAllocBytes` here, instead
382382
// fetch the previously prepared bytes from `prepared_alloc_bytes`.
383-
let mut global_state = ecx.machine.alloc_addresses.borrow_mut();
383+
let mut global_state = this.machine.alloc_addresses.borrow_mut();
384384
let mut prepared_alloc_bytes = global_state
385385
.prepared_alloc_bytes
386386
.remove(&id)
@@ -403,23 +403,23 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
403403
ptr: interpret::Pointer<Provenance>,
404404
size: i64,
405405
) -> Option<(AllocId, Size)> {
406-
let ecx = self.eval_context_ref();
406+
let this = self.eval_context_ref();
407407

408408
let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance)
409409

410410
let alloc_id = if let Provenance::Concrete { alloc_id, .. } = tag {
411411
alloc_id
412412
} else {
413413
// A wildcard pointer.
414-
ecx.alloc_id_from_addr(addr.bytes(), size)?
414+
this.alloc_id_from_addr(addr.bytes(), size)?
415415
};
416416

417417
// This cannot fail: since we already have a pointer with that provenance, adjust_alloc_root_pointer
418418
// must have been called in the past, so we can just look up the address in the map.
419-
let base_addr = *ecx.machine.alloc_addresses.borrow().base_addr.get(&alloc_id).unwrap();
419+
let base_addr = *this.machine.alloc_addresses.borrow().base_addr.get(&alloc_id).unwrap();
420420

421421
// Wrapping "addr - base_addr"
422-
let rel_offset = ecx.truncate_to_target_usize(addr.bytes().wrapping_sub(base_addr));
422+
let rel_offset = this.truncate_to_target_usize(addr.bytes().wrapping_sub(base_addr));
423423
Some((alloc_id, Size::from_bytes(rel_offset)))
424424
}
425425
}

0 commit comments

Comments
 (0)