@@ -111,8 +111,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
     // Returns the exposed `AllocId` that corresponds to the specified addr,
     // or `None` if the addr is out of bounds.
     fn alloc_id_from_addr(&self, addr: u64, size: i64) -> Option<AllocId> {
-        let ecx = self.eval_context_ref();
-        let global_state = ecx.machine.alloc_addresses.borrow();
+        let this = self.eval_context_ref();
+        let global_state = this.machine.alloc_addresses.borrow();
         assert!(global_state.provenance_mode != ProvenanceMode::Strict);

         // We always search the allocation to the right of this address. So if the size is strictly
@@ -134,15 +134,15 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 // entered for addresses that are not the base address, so even zero-sized
                 // allocations will get recognized at their base address -- but all other
                 // allocations will *not* be recognized at their "end" address.
-                let size = ecx.get_alloc_info(alloc_id).0;
+                let size = this.get_alloc_info(alloc_id).0;
                 if offset < size.bytes() { Some(alloc_id) } else { None }
             }
         }?;

         // We only use this provenance if it has been exposed.
         if global_state.exposed.contains(&alloc_id) {
             // This must still be live, since we remove allocations from `int_to_ptr_map` when they get freed.
-            debug_assert!(ecx.is_alloc_live(alloc_id));
+            debug_assert!(this.is_alloc_live(alloc_id));
             Some(alloc_id)
         } else {
             None
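
[Review note] For readers following along: the lookup that ends here relies on `int_to_ptr_map` being kept sorted by base address. Below is a minimal standalone sketch of the same "search the allocation to the right" strategy, with hypothetical simplified types (a plain `u32` standing in for `AllocId`); it is an illustration, not Miri's actual code.

```rust
// Sketch: find which allocation an address points into, given base addresses
// sorted ascending. All names here are hypothetical stand-ins.
fn find_alloc(int_to_ptr_map: &[(u64, u32)], addr: u64) -> Option<(u32, u64)> {
    // `partition_point` returns the index of the first base address > addr,
    // so the candidate allocation is the one just before it.
    let pos = int_to_ptr_map.partition_point(|&(base, _)| base <= addr);
    let (base, alloc_id) = *int_to_ptr_map.get(pos.checked_sub(1)?)?;
    let offset = addr - base; // never underflows: base <= addr by construction
    Some((alloc_id, offset))
}

fn main() {
    // Two allocations with base addresses 0x100 and 0x200.
    let map = [(0x100u64, 1u32), (0x200, 2)];
    assert_eq!(find_alloc(&map, 0x108), Some((1, 8)));
    assert_eq!(find_alloc(&map, 0x80), None); // below every allocation
}
```

As in the hunk above, the caller still has to compare the returned offset against the allocation's size, which is why "end" addresses fall through to `None` while the base address of a zero-sized allocation still matches.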
@@ -155,9 +155,9 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
         alloc_id: AllocId,
         memory_kind: MemoryKind,
     ) -> InterpResult<'tcx, u64> {
-        let ecx = self.eval_context_ref();
-        let mut rng = ecx.machine.rng.borrow_mut();
-        let (size, align, kind) = ecx.get_alloc_info(alloc_id);
+        let this = self.eval_context_ref();
+        let mut rng = this.machine.rng.borrow_mut();
+        let (size, align, kind) = this.get_alloc_info(alloc_id);
         // This is either called immediately after allocation (and then cached), or when
         // adjusting `tcx` pointers (which never get freed). So assert that we are looking
         // at a live allocation. This also ensures that we never re-assign an address to an
@@ -166,12 +166,12 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
         assert!(!matches!(kind, AllocKind::Dead));

         // This allocation does not have a base address yet, pick or reuse one.
-        if ecx.machine.native_lib.is_some() {
+        if this.machine.native_lib.is_some() {
             // In native lib mode, we use the "real" address of the bytes for this allocation.
             // This ensures the interpreted program and native code have the same view of memory.
             let base_ptr = match kind {
                 AllocKind::LiveData => {
-                    if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
+                    if this.tcx.try_get_global_alloc(alloc_id).is_some() {
                         // For new global allocations, we always pre-allocate the memory to be able to use the machine address directly.
                         let prepared_bytes = MiriAllocBytes::zeroed(size, align)
                             .unwrap_or_else(|| {
@@ -185,7 +185,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                             .unwrap();
                         ptr
                     } else {
-                        ecx.get_alloc_bytes_unchecked_raw(alloc_id)?
+                        this.get_alloc_bytes_unchecked_raw(alloc_id)?
                     }
                 }
                 AllocKind::Function | AllocKind::VTable => {
@@ -204,10 +204,10 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
         }
         // We are not in native lib mode, so we control the addresses ourselves.
         if let Some((reuse_addr, clock)) =
-            global_state.reuse.take_addr(&mut *rng, size, align, memory_kind, ecx.active_thread())
+            global_state.reuse.take_addr(&mut *rng, size, align, memory_kind, this.active_thread())
         {
             if let Some(clock) = clock {
-                ecx.acquire_clock(&clock);
+                this.acquire_clock(&clock);
             }
             interp_ok(reuse_addr)
         } else {
@@ -230,7 +230,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 .checked_add(max(size.bytes(), 1))
                 .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
             // Even if `Size` didn't overflow, we might still have filled up the address space.
-            if global_state.next_base_addr > ecx.target_usize_max() {
+            if global_state.next_base_addr > this.target_usize_max() {
                 throw_exhaust!(AddressSpaceFull);
             }

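
[Review note] For context, the allocation path this hunk touches is a checked bump allocator. A compact standalone sketch of the same scheme, returning `None` where the real code throws `AddressSpaceFull` (all names hypothetical):

```rust
// Sketch: bump-allocate target addresses with alignment and overflow checks,
// in the spirit of the code above.
fn bump_alloc(next_base_addr: &mut u64, size: u64, align: u64, target_usize_max: u64) -> Option<u64> {
    // Align the base address upwards; `checked_*` guards against u64 overflow.
    let base_addr = next_base_addr.checked_next_multiple_of(align)?;
    // Reserve at least 1 byte so even a zero-sized allocation gets a unique address.
    *next_base_addr = base_addr.checked_add(size.max(1))?;
    // Even if u64 arithmetic didn't overflow, the *target's* address space
    // (e.g. 32-bit) may be full.
    if *next_base_addr > target_usize_max {
        return None;
    }
    Some(base_addr)
}

fn main() {
    let mut next = 0x1000u64;
    let a = bump_alloc(&mut next, 24, 8, u32::MAX as u64).unwrap();
    let b = bump_alloc(&mut next, 0, 16, u32::MAX as u64).unwrap();
    assert!(b >= a + 24 && b % 16 == 0); // disjoint and aligned
}
```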
@@ -243,8 +243,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
         alloc_id: AllocId,
         memory_kind: MemoryKind,
     ) -> InterpResult<'tcx, u64> {
-        let ecx = self.eval_context_ref();
-        let mut global_state = ecx.machine.alloc_addresses.borrow_mut();
+        let this = self.eval_context_ref();
+        let mut global_state = this.machine.alloc_addresses.borrow_mut();
         let global_state = &mut *global_state;

         match global_state.base_addr.get(&alloc_id) {
@@ -283,31 +283,31 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
 impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
 pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
     fn expose_ptr(&mut self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
-        let ecx = self.eval_context_mut();
-        let global_state = ecx.machine.alloc_addresses.get_mut();
+        let this = self.eval_context_mut();
+        let global_state = this.machine.alloc_addresses.get_mut();
         // In strict mode, we don't need this, so we can save some cycles by not tracking it.
         if global_state.provenance_mode == ProvenanceMode::Strict {
             return interp_ok(());
         }
         // Exposing a dead alloc is a no-op, because it's not possible to get a dead allocation
         // via int2ptr.
-        if !ecx.is_alloc_live(alloc_id) {
+        if !this.is_alloc_live(alloc_id) {
             return interp_ok(());
         }
         trace!("Exposing allocation id {alloc_id:?}");
-        let global_state = ecx.machine.alloc_addresses.get_mut();
+        let global_state = this.machine.alloc_addresses.get_mut();
         global_state.exposed.insert(alloc_id);
-        if ecx.machine.borrow_tracker.is_some() {
-            ecx.expose_tag(alloc_id, tag)?;
+        if this.machine.borrow_tracker.is_some() {
+            this.expose_tag(alloc_id, tag)?;
         }
         interp_ok(())
     }

     fn ptr_from_addr_cast(&self, addr: u64) -> InterpResult<'tcx, Pointer> {
         trace!("Casting {:#x} to a pointer", addr);

-        let ecx = self.eval_context_ref();
-        let global_state = ecx.machine.alloc_addresses.borrow();
+        let this = self.eval_context_ref();
+        let global_state = this.machine.alloc_addresses.borrow();

         // Potentially emit a warning.
         match global_state.provenance_mode {
@@ -319,9 +319,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 }
                 PAST_WARNINGS.with_borrow_mut(|past_warnings| {
                     let first = past_warnings.is_empty();
-                    if past_warnings.insert(ecx.cur_span()) {
+                    if past_warnings.insert(this.cur_span()) {
                         // Newly inserted, so first time we see this span.
-                        ecx.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
+                        this.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
                     }
                 });
             }
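
[Review note] For context, `expose_ptr` and `ptr_from_addr_cast` are the machine side of Rust's exposed-provenance APIs. A tiny user-level program that exercises both paths (assuming Rust 1.84+, where these APIs are stable; it passes under Miri's default provenance mode, while `-Zmiri-strict-provenance` rejects the cast):

```rust
fn main() {
    let x = 42u32;
    // Exposes the allocation's provenance (the `expose_ptr` path above).
    let addr: usize = (&raw const x).expose_provenance();
    // Creates a wildcard pointer from a bare address (the `ptr_from_addr_cast` path).
    let p = std::ptr::with_exposed_provenance::<u32>(addr);
    // Legal because `x`'s allocation was exposed at this address.
    assert_eq!(unsafe { *p }, 42);
}
```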
@@ -347,19 +347,19 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         tag: BorTag,
         kind: MemoryKind,
     ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
-        let ecx = self.eval_context_ref();
+        let this = self.eval_context_ref();

         let (prov, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
         let alloc_id = prov.alloc_id();

         // Get a pointer to the beginning of this allocation.
-        let base_addr = ecx.addr_from_alloc_id(alloc_id, kind)?;
+        let base_addr = this.addr_from_alloc_id(alloc_id, kind)?;
         let base_ptr = interpret::Pointer::new(
             Provenance::Concrete { alloc_id, tag },
             Size::from_bytes(base_addr),
         );
         // Add offset with the right kind of pointer-overflowing arithmetic.
-        interp_ok(base_ptr.wrapping_offset(offset, ecx))
+        interp_ok(base_ptr.wrapping_offset(offset, this))
     }

     // This returns some prepared `MiriAllocBytes`, either because `addr_from_alloc_id` reserved
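
[Review note] "Pointer-overflowing arithmetic" here means offsets wrap modulo the target's address-space size; the inverse, wrapping "addr - base_addr", appears in `ptr_get_alloc` further down. A standalone illustration under the assumption of a 32-bit target (the helper is a hypothetical simplification of `truncate_to_target_usize`):

```rust
// Sketch: turning a relative offset into an absolute address and back,
// wrapping modulo the target's address-space size (here: 32-bit).
fn truncate_to_target_usize(v: u64) -> u64 {
    v & u32::MAX as u64
}

fn main() {
    let base_addr: u64 = 0xFFFF_FFF0; // near the top of a 32-bit space
    let offset: u64 = 0x20;
    // "Add offset with the right kind of pointer-overflowing arithmetic":
    let abs = truncate_to_target_usize(base_addr.wrapping_add(offset));
    assert_eq!(abs, 0x10); // wrapped around the 32-bit address space
    // And the inverse, as in `ptr_get_alloc`: wrapping "addr - base_addr".
    let rel = truncate_to_target_usize(abs.wrapping_sub(base_addr));
    assert_eq!(rel, 0x20);
}
```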
@@ -371,16 +371,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         bytes: &[u8],
         align: Align,
     ) -> InterpResult<'tcx, MiriAllocBytes> {
-        let ecx = self.eval_context_ref();
-        if ecx.machine.native_lib.is_some() {
+        let this = self.eval_context_ref();
+        if this.machine.native_lib.is_some() {
             // In native lib mode, `MiriAllocBytes` for global allocations are handled via `prepared_alloc_bytes`.
             // This additional call ensures that some `MiriAllocBytes` are always prepared, just in case
             // this function gets called before the first time `addr_from_alloc_id` gets called.
-            ecx.addr_from_alloc_id(id, kind)?;
+            this.addr_from_alloc_id(id, kind)?;
             // The memory we need here will have already been allocated during an earlier call to
             // `addr_from_alloc_id` for this allocation. So don't create a new `MiriAllocBytes` here, instead
             // fetch the previously prepared bytes from `prepared_alloc_bytes`.
-            let mut global_state = ecx.machine.alloc_addresses.borrow_mut();
+            let mut global_state = this.machine.alloc_addresses.borrow_mut();
             let mut prepared_alloc_bytes = global_state
                 .prepared_alloc_bytes
                 .remove(&id)
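
[Review note] The prepare-once/fetch-later contract between `addr_from_alloc_id` and the function above, as a standalone sketch. Everything here is a hypothetical simplification (a `u64` key and a plain `Vec<u8>` instead of `AllocId` and aligned `MiriAllocBytes`):

```rust
use std::collections::HashMap;

// Sketch: reserve zeroed bytes up front so their machine address can serve as
// the allocation's base address; later, hand out exactly those bytes.
struct State {
    prepared_alloc_bytes: HashMap<u64, Vec<u8>>,
}

impl State {
    // Like `addr_from_alloc_id`: allocate now, so the address is fixed
    // before native code ever sees it.
    fn prepare(&mut self, id: u64, size: usize) -> usize {
        let bytes = vec![0u8; size];
        let addr = bytes.as_ptr() as usize; // the "real" machine address
        self.prepared_alloc_bytes.insert(id, bytes);
        addr
    }

    // Like the function above: don't allocate again, fetch the buffer whose
    // address was already handed out, then fill in the initial contents.
    fn fetch(&mut self, id: u64, contents: &[u8]) -> Vec<u8> {
        let mut bytes = self.prepared_alloc_bytes.remove(&id).expect("prepared earlier");
        bytes[..contents.len()].copy_from_slice(contents);
        bytes
    }
}

fn main() {
    let mut st = State { prepared_alloc_bytes: HashMap::new() };
    let addr = st.prepare(7, 4);
    let buf = st.fetch(7, &[1, 2, 3, 4]);
    assert_eq!(buf.as_ptr() as usize, addr); // same memory, same address
}
```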
@@ -403,23 +403,23 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         ptr: interpret::Pointer<Provenance>,
         size: i64,
     ) -> Option<(AllocId, Size)> {
-        let ecx = self.eval_context_ref();
+        let this = self.eval_context_ref();

         let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance)

         let alloc_id = if let Provenance::Concrete { alloc_id, .. } = tag {
             alloc_id
         } else {
             // A wildcard pointer.
-            ecx.alloc_id_from_addr(addr.bytes(), size)?
+            this.alloc_id_from_addr(addr.bytes(), size)?
         };

         // This cannot fail: since we already have a pointer with that provenance, adjust_alloc_root_pointer
         // must have been called in the past, so we can just look up the address in the map.
-        let base_addr = *ecx.machine.alloc_addresses.borrow().base_addr.get(&alloc_id).unwrap();
+        let base_addr = *this.machine.alloc_addresses.borrow().base_addr.get(&alloc_id).unwrap();

         // Wrapping "addr - base_addr"
-        let rel_offset = ecx.truncate_to_target_usize(addr.bytes().wrapping_sub(base_addr));
+        let rel_offset = this.truncate_to_target_usize(addr.bytes().wrapping_sub(base_addr));
         Some((alloc_id, Size::from_bytes(rel_offset)))
     }
 }