@@ -23,6 +23,10 @@ use crate::*;
 pub mod diagnostics;
 use diagnostics::{AllocHistory, TagHistory};
 
+pub mod stack;
+use stack::Stack;
+
+pub type PtrId = NonZeroU64;
 pub type CallId = NonZeroU64;
 
 // Even reading memory can have effects on the stack, so we need a `RefCell` here.
@@ -111,23 +115,6 @@ impl fmt::Debug for Item {
     }
 }
 
-/// Extra per-location state.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct Stack {
-    /// Used *mostly* as a stack; never empty.
-    /// Invariants:
-    /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
-    /// * No tag occurs in the stack more than once.
-    borrows: Vec<Item>,
-    /// If this is `Some(id)`, then the actual current stack is unknown. This can happen when
-    /// wildcard pointers are used to access this location. What we do know is that `borrows` are at
-    /// the top of the stack, and below it are arbitrarily many items whose `tag` is strictly less
-    /// than `id`.
-    /// When the bottom is unknown, `borrows` always has a `SharedReadOnly` or `Unique` at the bottom;
-    /// we never have the unknown-to-known boundary in an SRW group.
-    unknown_bottom: Option<SbTag>,
-}
-
 /// Extra per-allocation state.
 #[derive(Clone, Debug)]
 pub struct Stacks {
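Note: the new `stack` module itself is not part of this diff. Judging from the call sites in the hunks below (`Stack::new`, `get`, `len`, `insert`, `unknown_bottom`, `set_unknown_bottom`), its core surface is roughly the sketch that follows. This is inferred, not quoted from the real `stack.rs`: the fields are carried over from the struct removed above, the by-value comparisons later in the diff suggest `Item` and `SbTag` are `Copy`, and the actual module may well contain more (`find_granting`, caching, and so on).

    // Hypothetical sketch of the new `stack` module, inferred from this diff.
    use super::{Item, SbTag};

    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct Stack {
        /// Same invariants as before: never empty, no duplicate tags,
        /// only `SharedReadOnly` above a `SharedReadOnly`.
        borrows: Vec<Item>,
        /// Strict upper bound for tags in the unknown part of the stack, if any.
        unknown_bottom: Option<SbTag>,
    }

    impl Stack {
        pub fn new(item: Item) -> Self {
            Stack { borrows: vec![item], unknown_bottom: None }
        }

        /// `Item` is small and (we assume) `Copy`, so items are handed out by
        /// value; that is why the dedup check below can compare against `Some(new)`.
        pub fn get(&self, idx: usize) -> Option<Item> {
            self.borrows.get(idx).copied()
        }

        pub fn len(&self) -> usize {
            self.borrows.len()
        }

        pub fn insert(&mut self, new_idx: usize, new: Item) {
            self.borrows.insert(new_idx, new);
        }

        pub fn unknown_bottom(&self) -> Option<SbTag> {
            self.unknown_bottom
        }

        /// Forget the known part entirely; `tag` is the new strict upper bound
        /// for whatever may live in the unknown bottom (callers previously
        /// cleared `borrows` by hand before setting the bound).
        pub fn set_unknown_bottom(&mut self, tag: SbTag) {
            self.borrows.clear();
            self.unknown_bottom = Some(tag);
        }
    }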
@@ -297,65 +284,10 @@ impl Permission {
 
 /// Core per-location operations: access, dealloc, reborrow.
 impl<'tcx> Stack {
-    /// Find the item granting the given kind of access to the given tag, and return where
-    /// it is on the stack. For wildcard tags, the given index is approximate, but if *no*
-    /// index is given it means the match was *not* in the known part of the stack.
-    /// `Ok(None)` indicates it matched the "unknown" part of the stack.
-    /// `Err` indicates it was not found.
-    fn find_granting(
-        &self,
-        access: AccessKind,
-        tag: SbTagExtra,
-        exposed_tags: &FxHashSet<SbTag>,
-    ) -> Result<Option<usize>, ()> {
-        let SbTagExtra::Concrete(tag) = tag else {
-            // Handle the wildcard case.
-            // Go search the stack for an exposed tag.
-            if let Some(idx) =
-                self.borrows
-                    .iter()
-                    .enumerate() // we also need to know *where* in the stack
-                    .rev() // search top-to-bottom
-                    .find_map(|(idx, item)| {
-                        // If the item fits and *might* be this wildcard, use it.
-                        if item.perm.grants(access) && exposed_tags.contains(&item.tag) {
-                            Some(idx)
-                        } else {
-                            None
-                        }
-                    })
-            {
-                return Ok(Some(idx));
-            }
-            // If we couldn't find it in the stack, check the unknown bottom.
-            return if self.unknown_bottom.is_some() { Ok(None) } else { Err(()) };
-        };
-
-        if let Some(idx) =
-            self.borrows
-                .iter()
-                .enumerate() // we also need to know *where* in the stack
-                .rev() // search top-to-bottom
-                // Return permission of first item that grants access.
-                // We require a permission with the right tag, ensuring U3 and F3.
-                .find_map(|(idx, item)| {
-                    if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
-                })
-        {
-            return Ok(Some(idx));
-        }
-
-        // Couldn't find it in the stack; but if there is an unknown bottom it might be there.
-        let found = self.unknown_bottom.is_some_and(|&unknown_limit| {
-            tag.0 < unknown_limit.0 // unknown_limit is an upper bound for what can be in the unknown bottom.
-        });
-        if found { Ok(None) } else { Err(()) }
-    }
-
     /// Find the first write-incompatible item above the given one --
     /// i.e, find the height to which the stack will be truncated when writing to `granting`.
     fn find_first_write_incompatible(&self, granting: usize) -> usize {
-        let perm = self.borrows[granting].perm;
+        let perm = self.get(granting).unwrap().perm;
         match perm {
             Permission::SharedReadOnly => bug!("Cannot use SharedReadOnly for writing"),
             Permission::Disabled => bug!("Cannot use Disabled for anything"),
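Note that `find_granting` is deleted here with no replacement appearing in this file; presumably it moves into the new `stack` module essentially unchanged, alongside the `Stack` struct (its new home is not shown in this diff).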
@@ -366,7 +298,7 @@ impl<'tcx> Stack {
             Permission::SharedReadWrite => {
                 // The SharedReadWrite *just* above us are compatible, to skip those.
                 let mut idx = granting + 1;
-                while let Some(item) = self.borrows.get(idx) {
+                while let Some(item) = self.get(idx) {
                     if item.perm == Permission::SharedReadWrite {
                         // Go on.
                         idx += 1;
@@ -461,16 +393,16 @@ impl<'tcx> Stack {
                 // There is a SRW group boundary between the unknown and the known, so everything is incompatible.
                 0
             };
-            for item in self.borrows.drain(first_incompatible_idx..).rev() {
-                trace!("access: popping item {:?}", item);
+            self.pop_items_after(first_incompatible_idx, |item| {
                 Stack::item_popped(
                     &item,
                     Some((tag, alloc_range, offset, access)),
                     global,
                     alloc_history,
                 )?;
                 alloc_history.log_invalidation(item.tag, alloc_range, current_span);
-            }
+                Ok(())
+            })?;
         } else {
             // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
             // The reason this is not following the stack discipline (by removing the first Unique and
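`pop_items_after` is another method assumed from the unseen `stack` module: per the call site above, it pops every item from `first_incompatible_idx` upward, top of the stack first, handing each one to a fallible visitor. A minimal sketch extending the `Stack` sketch earlier, written generic over the error type so it stands alone (the real method presumably returns `InterpResult` and maintains whatever extra bookkeeping the module keeps):

    impl Stack {
        /// Hypothetical signature inferred from the call site above.
        pub fn pop_items_after<E>(
            &mut self,
            start: usize,
            mut visitor: impl FnMut(Item) -> Result<(), E>,
        ) -> Result<(), E> {
            // Pop top-down, mirroring the old `drain(start..).rev()` loop.
            while self.borrows.len() > start {
                let item = self.borrows.pop().unwrap();
                visitor(item)?;
            }
            Ok(())
        }
    }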
@@ -487,44 +419,39 @@ impl<'tcx> Stack {
                 // We are reading from something in the unknown part. That means *all* `Unique` we know about are dead now.
                 0
             };
-            for idx in (first_incompatible_idx..self.borrows.len()).rev() {
-                let item = &mut self.borrows[idx];
-
-                if item.perm == Permission::Unique {
-                    trace!("access: disabling item {:?}", item);
-                    Stack::item_popped(
-                        item,
-                        Some((tag, alloc_range, offset, access)),
-                        global,
-                        alloc_history,
-                    )?;
-                    item.perm = Permission::Disabled;
-                    alloc_history.log_invalidation(item.tag, alloc_range, current_span);
-                }
-            }
+            self.disable_uniques_starting_at(first_incompatible_idx, |item| {
+                Stack::item_popped(
+                    &item,
+                    Some((tag, alloc_range, offset, access)),
+                    global,
+                    alloc_history,
+                )?;
+                alloc_history.log_invalidation(item.tag, alloc_range, current_span);
+                Ok(())
+            })?;
         }
 
         // If this was an approximate action, we now collapse everything into an unknown.
         if granting_idx.is_none() || matches!(tag, SbTagExtra::Wildcard) {
             // Compute the upper bound of the items that remain.
             // (This is why we did all the work above: to reduce the items we have to consider here.)
             let mut max = NonZeroU64::new(1).unwrap();
-            for item in &self.borrows {
+            for i in 0..self.len() {
+                let item = self.get(i).unwrap();
                 // Skip disabled items, they cannot be matched anyway.
                 if !matches!(item.perm, Permission::Disabled) {
                     // We are looking for a strict upper bound, so add 1 to this tag.
                     max = cmp::max(item.tag.0.checked_add(1).unwrap(), max);
                 }
             }
-            if let Some(unk) = self.unknown_bottom {
+            if let Some(unk) = self.unknown_bottom() {
                 max = cmp::max(unk.0, max);
             }
             // Use `max` as new strict upper bound for everything.
             trace!(
                 "access: forgetting stack to upper bound {max} due to wildcard or unknown access"
             );
-            self.borrows.clear();
-            self.unknown_bottom = Some(SbTag(max));
+            self.set_unknown_bottom(SbTag(max));
         }
 
         // Done.
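`disable_uniques_starting_at` encapsulates the read-side loop deleted above: every `Unique` item at or above the given index is first reported to the visitor (so the caller can log the invalidation while the item still reads `Unique`) and then flipped to `Disabled` in place. A sketch under the same assumptions as the previous ones, with `Permission` likewise imported from the parent module:

    impl Stack {
        /// Hypothetical signature inferred from the call site above.
        pub fn disable_uniques_starting_at<E>(
            &mut self,
            disable_start: usize,
            mut visitor: impl FnMut(Item) -> Result<(), E>,
        ) -> Result<(), E> {
            for item in self.borrows[disable_start..].iter_mut() {
                if item.perm == Permission::Unique {
                    // Report first, then disable in place (no item is removed).
                    visitor(*item)?;
                    item.perm = Permission::Disabled;
                }
            }
            Ok(())
        }
    }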
@@ -553,7 +480,8 @@ impl<'tcx> Stack {
         })?;
 
         // Step 2: Remove all items. Also checks for protectors.
-        for item in self.borrows.drain(..).rev() {
+        for idx in (0..self.len()).rev() {
+            let item = self.get(idx).unwrap();
             Stack::item_popped(&item, None, global, alloc_history)?;
         }
         Ok(())
@@ -601,8 +529,7 @@ impl<'tcx> Stack {
             // The new thing is SRW anyway, so we cannot push it "on top of the unkown part"
             // (for all we know, it might join an SRW group inside the unknown).
             trace!("reborrow: forgetting stack entirely due to SharedReadWrite reborrow from wildcard or unknown");
-            self.borrows.clear();
-            self.unknown_bottom = Some(global.next_ptr_tag);
+            self.set_unknown_bottom(global.next_ptr_tag);
             return Ok(());
         };
 
@@ -629,19 +556,18 @@ impl<'tcx> Stack {
             // on top of `derived_from`, and we want the new item at the top so that we
             // get the strongest possible guarantees.
             // This ensures U1 and F1.
-            self.borrows.len()
+            self.len()
         };
 
         // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors.
         // `new_idx` might be 0 if we just cleared the entire stack.
-        if self.borrows.get(new_idx) == Some(&new)
-            || (new_idx > 0 && self.borrows[new_idx - 1] == new)
+        if self.get(new_idx) == Some(new) || (new_idx > 0 && self.get(new_idx - 1).unwrap() == new)
         {
             // Optimization applies, done.
             trace!("reborrow: avoiding adding redundant item {:?}", new);
         } else {
             trace!("reborrow: adding item {:?}", new);
-            self.borrows.insert(new_idx, new);
+            self.insert(new_idx, new);
         }
         Ok(())
     }
@@ -653,8 +579,8 @@ impl<'tcx> Stacks {
     /// Creates new stack with initial tag.
    fn new(size: Size, perm: Permission, tag: SbTag) -> Self {
        let item = Item { perm, tag, protector: None };
-        let stack = Stack { borrows: vec![item], unknown_bottom: None };
 
+        let stack = Stack::new(item);
        Stacks {
            stacks: RangeMap::new(size, stack),
            history: AllocHistory::new(),
@@ -900,11 +826,14 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
             // We have to use shared references to alloc/memory_extra here since
             // `visit_freeze_sensitive` needs to access the global state.
             let extra = this.get_alloc_extra(alloc_id)?;
+
             let mut stacked_borrows = extra
                 .stacked_borrows
                 .as_ref()
                 .expect("we should have Stacked Borrows data")
                 .borrow_mut();
+            let mut current_span = this.machine.current_span();
+
             this.visit_freeze_sensitive(place, size, |mut range, frozen| {
                 // Adjust range.
                 range.start += base_offset;
@@ -929,7 +858,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
                         item,
                         (alloc_id, range, offset),
                         &mut global,
-                        current_span,
+                        &mut current_span,
                         history,
                         exposed_tags,
                     )
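One detail worth calling out: `current_span` is now created once, outside the per-range closure, and threaded through as `&mut current_span`. That is consistent with `current_span()` returning a helper that computes the span lazily and caches it across calls, rather than a plain `Span`, though the supporting change is outside this diff.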