@@ -75,15 +75,35 @@ impl fmt::Debug for Item {
 }
 
 /// Extra per-location state.
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug)]
 pub struct Stack {
     /// Used *mostly* as a stack; never empty.
     /// Invariants:
     /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
     /// * Except for `Untagged`, no tag occurs in the stack more than once.
     borrows: Vec<Item>,
+    /// We often want to know whether (and where) an item with a given `SbTag` exists in the
+    /// stack. The tag we are looking for is frequently not near the top of the stack, so a
+    /// linear search can be slow; instead we exploit the uniqueness guarantee to keep a map
+    /// from tag to position in the stack.
+    cache: FxHashMap<PtrId, usize>,
+    /// `Untagged` may occur multiple times, so we store its positions outside of the map.
+    untagged: Vec<usize>,
+    /// On a read, we need to disable all `Unique` above the granting item. We can avoid most of
+    /// this scan by keeping track of the region of the borrow stack that may contain `Unique`s.
+    first_unique: usize,
+    last_unique: usize,
+}
+
+impl PartialEq for Stack {
+    fn eq(&self, other: &Self) -> bool {
+        // All the semantics of `Stack` live in `self.borrows`; everything else is just a cache.
+        self.borrows == other.borrows
+    }
 }
 
+impl Eq for Stack {}
+
 /// Extra per-allocation state.
 #[derive(Clone, Debug)]
 pub struct Stacks {
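A note on the manual `PartialEq` above: the map, the untagged list, and the unique range are all derived from `borrows`, so equality has to ignore them, or two semantically identical stacks could compare unequal just because their caches differ. Below is a minimal, self-contained sketch of that pattern; `CachedStack`, `items`, and `position` are hypothetical names, not the Miri types.

    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    struct CachedStack {
        items: Vec<u64>,               // the semantic contents
        position: HashMap<u64, usize>, // derived cache: value -> index in `items`
    }

    impl PartialEq for CachedStack {
        fn eq(&self, other: &Self) -> bool {
            // Only `items` carries meaning; `position` can always be rebuilt from it.
            self.items == other.items
        }
    }
    impl Eq for CachedStack {}

    fn main() {
        let a = CachedStack { items: vec![1, 2, 3], position: HashMap::new() };
        let mut b = a.clone();
        b.position.insert(1, 0); // caches differ...
        assert_eq!(a, b);        // ...but the stacks still compare equal
    }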
@@ -261,17 +281,23 @@ impl<'tcx> Stack {
     /// Find the item granting the given kind of access to the given tag, and return where
     /// it is on the stack.
     fn find_granting(&self, access: AccessKind, tag: SbTag) -> Option<usize> {
-        self.borrows
-            .iter()
-            .enumerate() // we also need to know *where* in the stack
-            .rev() // search top-to-bottom
-            // Return permission of first item that grants access.
-            // We require a permission with the right tag, ensuring U3 and F3.
-            .find_map(
-                |(idx, item)| {
-                    if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
-                },
-            )
+        match tag {
+            SbTag::Untagged => {
+                // Search top-to-bottom.
+                for idx in self.untagged.iter().rev() {
+                    // Return the position of the first item that grants access.
+                    // We require a permission with the right tag, ensuring U3 and F3.
+                    if self.borrows[*idx].perm.grants(access) {
+                        return Some(*idx);
+                    }
+                }
+                return None;
+            }
+            SbTag::Tagged(id) =>
+                self.cache.get(&id).and_then(|idx| {
+                    if self.borrows[*idx].perm.grants(access) { Some(*idx) } else { None }
+                }),
+        }
     }
 
     /// Find the first write-incompatible item above the given one --
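The rewritten `find_granting` above splits the lookup into two paths: a tagged item is found through the hash map in O(1), while `Untagged` falls back to scanning a sorted list of candidate positions from the top. A simplified, self-contained sketch of the same idea, with hypothetical names and a plain `bool` slice standing in for `item.perm.grants(access)`:

    use std::collections::HashMap;

    fn find_granting(
        grants: &[bool],             // stand-in for `self.borrows[idx].perm.grants(access)`
        cache: &HashMap<u64, usize>, // tag -> position; tags are unique
        untagged: &[usize],          // positions of untagged items, kept in ascending order
        tag: Option<u64>,            // `None` plays the role of `SbTag::Untagged`
    ) -> Option<usize> {
        match tag {
            // Untagged: scan the candidate positions top-to-bottom.
            None => untagged.iter().rev().copied().find(|&idx| grants[idx]),
            // Tagged: one hash lookup, then check that the item actually grants access.
            Some(id) => cache.get(&id).copied().filter(|&idx| grants[idx]),
        }
    }

    fn main() {
        let grants = vec![true, false, true];
        let mut cache = HashMap::new();
        cache.insert(7_u64, 1); // tag 7 sits at position 1, which does not grant access
        let untagged = vec![0, 2];
        assert_eq!(find_granting(&grants, &cache, &untagged, None), Some(2));
        assert_eq!(find_granting(&grants, &cache, &untagged, Some(7)), None);
    }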
@@ -366,6 +392,23 @@ impl<'tcx> Stack {
             for item in self.borrows.drain(first_incompatible_idx..).rev() {
                 trace!("access: popping item {:?}", item);
                 Stack::check_protector(&item, Some((tag, access)), global)?;
+                // The drain removes elements from `borrows`, but we need to remove those elements
+                // from the lookup cache too.
+                if let SbTag::Tagged(id) = item.tag {
+                    assert!(self.cache.remove(&id).is_some());
+                }
+            }
+
+            while self.untagged.last() >= Some(&first_incompatible_idx) {
+                self.untagged.pop();
+            }
+            if first_incompatible_idx <= self.first_unique {
+                // We removed all the `Unique` items.
+                self.first_unique = 0;
+                self.last_unique = 0;
+            } else {
+                // Ensure the range doesn't extend past the new top of the stack.
+                self.last_unique = self.last_unique.min(first_incompatible_idx.saturating_sub(1));
             }
         } else {
             // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
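The write path above drains everything from `first_incompatible_idx` upward, so every cached position at or above that index has to be evicted as well. A simplified stand-in for that bookkeeping (the real code asserts on `cache.remove` while draining; `retain` here is just a compact approximation, and all names are hypothetical):

    use std::collections::HashMap;

    fn evict_from(cache: &mut HashMap<u64, usize>, untagged: &mut Vec<usize>, cutoff: usize) {
        // Drop every tagged entry whose position is being removed from the stack.
        cache.retain(|_, &mut pos| pos < cutoff);
        // `untagged` is sorted, so all doomed positions sit at the end.
        while untagged.last() >= Some(&cutoff) {
            untagged.pop();
        }
    }

    fn main() {
        let mut cache = HashMap::from([(1, 0), (2, 3), (3, 5)]);
        let mut untagged = vec![1, 2, 4];
        evict_from(&mut cache, &mut untagged, 3);
        assert_eq!(untagged, vec![1, 2]);
        assert!(cache.contains_key(&1) && !cache.contains_key(&2) && !cache.contains_key(&3));
    }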
@@ -376,14 +419,26 @@ impl<'tcx> Stack {
             // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
             // reference and use that.
             // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
-            for idx in ((granting_idx + 1)..self.borrows.len()).rev() {
-                let item = &mut self.borrows[idx];
-                if item.perm == Permission::Unique {
-                    trace!("access: disabling item {:?}", item);
-                    Stack::check_protector(item, Some((tag, access)), global)?;
-                    item.perm = Permission::Disabled;
+            if granting_idx < self.last_unique {
+                // Add 1 so we don't disable the granting item.
+                let lower = self.first_unique.max(granting_idx + 1);
+                //let upper = (self.last_unique + 1).min(self.borrows.len());
+                for item in &mut self.borrows[lower..=self.last_unique] {
+                    if item.perm == Permission::Unique {
+                        trace!("access: disabling item {:?}", item);
+                        Stack::check_protector(item, Some((tag, access)), global)?;
+                        item.perm = Permission::Disabled;
+                    }
                 }
             }
+            if granting_idx < self.first_unique {
+                // We disabled all the `Unique` items.
+                self.first_unique = 0;
+                self.last_unique = 0;
+            } else {
+                // Truncate the range to `granting_idx`.
+                self.last_unique = self.last_unique.min(granting_idx);
+            }
         }
 
         // Done.
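On the read path above, only the `[first_unique, last_unique]` window is walked instead of everything above the granting item, and the window is then shrunk so later reads do even less work. A minimal sketch of that window logic in isolation (hypothetical function; a `bool` slice stands in for `Permission::Unique`, and `(0, 0)` is used as the "empty window" convention, as in the code above):

    fn disable_uniques_above(
        unique: &mut [bool], // true = the item is `Unique`
        first_unique: &mut usize,
        last_unique: &mut usize,
        granting_idx: usize,
    ) {
        if granting_idx < *last_unique {
            // Add 1 so the granting item itself is never disabled.
            let lower = (*first_unique).max(granting_idx + 1);
            for u in &mut unique[lower..=*last_unique] {
                *u = false; // "disable" the Unique
            }
        }
        if granting_idx < *first_unique {
            // The whole window was disabled; reset it to the empty convention.
            *first_unique = 0;
            *last_unique = 0;
        } else {
            // Nothing above `granting_idx` can still be Unique.
            *last_unique = (*last_unique).min(granting_idx);
        }
    }

    fn main() {
        let mut unique = vec![false, true, true, false, true];
        let (mut first, mut last) = (1, 4);
        disable_uniques_above(&mut unique, &mut first, &mut last, 2);
        assert_eq!(unique, vec![false, true, true, false, false]); // only index 4 was disabled
        assert_eq!((first, last), (1, 2));
    }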
@@ -411,6 +466,11 @@ impl<'tcx> Stack {
             Stack::check_protector(&item, None, global)?;
         }
 
+        self.cache.clear();
+        self.untagged.clear();
+        self.first_unique = 0;
+        self.last_unique = 0;
+
         Ok(())
     }
 
@@ -469,6 +529,43 @@ impl<'tcx> Stack {
         } else {
             trace!("reborrow: adding item {:?}", new);
             self.borrows.insert(new_idx, new);
+            // The insert above changes the meaning of every cached index >= new_idx, so now
+            // we need to find each of those indices and increment it.
+
+            // Adjust the possibly-unique range if the insert occurs before or within it.
+            if self.first_unique >= new_idx {
+                self.first_unique += 1;
+            }
+            if self.last_unique >= new_idx {
+                self.last_unique += 1;
+            }
+            if new.perm == Permission::Unique {
+                // Make sure the possibly-unique range contains the new borrow.
+                self.first_unique = self.first_unique.min(new_idx);
+                self.last_unique = self.last_unique.max(new_idx);
+            }
+            // Repair the lookup cache; indices have shifted if the insert was not at the end.
+            if new_idx != self.borrows.len() - 1 {
+                for item in &self.borrows[new_idx + 1..] {
+                    if let SbTag::Tagged(id) = item.tag {
+                        *self.cache.get_mut(&id).unwrap() += 1;
+                    }
+                }
+                for idx in self.untagged.iter_mut().rev().take_while(|idx| **idx >= new_idx) {
+                    *idx += 1;
+                }
+            }
+            // We've now made a (conceptual) hole in the tag lookup cache:
+            // nothing maps to new_idx, because we've adjusted everything >= it.
+            match new.tag {
+                SbTag::Untagged => {
+                    self.untagged.push(new_idx);
+                    self.untagged.sort();
+                }
+                SbTag::Tagged(id) => {
+                    self.cache.insert(id, new_idx);
+                }
+            }
         }
 
         Ok(())
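The insertion path above has to shift every cached index at or above `new_idx` before the new item can claim that slot. A simplified, self-contained version of the same repair (hypothetical names; the real code walks only the shifted suffix of `borrows` and the tail of `untagged`, while this sketch just touches the whole map):

    use std::collections::HashMap;

    fn insert_and_repair(
        items: &mut Vec<u64>,
        cache: &mut HashMap<u64, usize>,
        idx: usize,
        new: u64,
    ) {
        items.insert(idx, new);
        // Everything that used to live at or above `idx` has moved up by one.
        for pos in cache.values_mut() {
            if *pos >= idx {
                *pos += 1;
            }
        }
        // Nothing maps to `idx` any more, so the new item can claim it.
        cache.insert(new, idx);
    }

    fn main() {
        let mut items = vec![10, 20, 30];
        let mut cache: HashMap<u64, usize> =
            items.iter().enumerate().map(|(i, &v)| (v, i)).collect();
        insert_and_repair(&mut items, &mut cache, 1, 15);
        assert_eq!(items, vec![10, 15, 20, 30]);
        assert_eq!((cache[&15], cache[&20], cache[&30]), (1, 2, 3));
    }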
@@ -547,7 +644,17 @@ impl<'tcx> Stacks {
     /// Creates new stack with initial tag.
     fn new(size: Size, perm: Permission, tag: SbTag) -> Self {
         let item = Item { perm, tag, protector: None };
-        let stack = Stack { borrows: vec![item] };
+        let mut cache = FxHashMap::default();
+        let mut untagged = Vec::new();
+        match item.tag {
+            SbTag::Untagged => {
+                untagged.push(0);
+            }
+            SbTag::Tagged(id) => {
+                cache.insert(id, 0);
+            }
+        }
+        let stack = Stack { borrows: vec![item], cache, untagged, first_unique: 0, last_unique: 0 };
 
         Stacks { stacks: RefCell::new(RangeMap::new(size, stack)) }
     }