
Commit 0716f4a

Remove linear searches of borrow stacks
Adding a HashMap from tags to stack locations lets us find a granting item in amortized constant time, or with a linear search of the very small array of untagged locations. Additionally, we keep a range denoting where in the stack there might be an item granting Unique permission, so that when we invalidate Uniques we do not need to scan much of the stack, and often scan nothing at all.
1 parent 8e818ff commit 0716f4a
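
The core of the change, as a minimal standalone sketch (hypothetical, simplified types; the real code below uses `SbTag`, `Item`, and `FxHashMap`): because every non-`Untagged` tag occurs at most once in a stack, a side map from tag to position can answer lookups without walking the stack, and it always agrees with the old top-to-bottom linear scan.

```rust
use std::collections::HashMap;

/// Hypothetical, simplified stand-in for Miri's `Stack`: tags are plain u64s
/// and permissions are omitted.
struct CachedStack {
    borrows: Vec<u64>,          // the stack itself
    cache: HashMap<u64, usize>, // tag -> position in `borrows`
}

impl CachedStack {
    fn push(&mut self, tag: u64) {
        // Maintain the invariant: the cache knows every tag's position.
        self.cache.insert(tag, self.borrows.len());
        self.borrows.push(tag);
    }

    /// Amortized O(1) lookup, replacing an O(stack depth) top-to-bottom scan.
    fn find(&self, tag: u64) -> Option<usize> {
        self.cache.get(&tag).copied()
    }
}

fn main() {
    let mut stack = CachedStack { borrows: Vec::new(), cache: HashMap::new() };
    for tag in [7, 42, 99] {
        stack.push(tag);
    }
    // Each tag occurs at most once, so the cached answer matches a linear scan.
    assert_eq!(stack.find(42), Some(1));
    assert_eq!(stack.borrows.iter().rposition(|&t| t == 42), Some(1));
}
```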

1 file changed: +126 -19 lines


src/stacked_borrows.rs (+126 -19)
```diff
@@ -75,15 +75,35 @@ impl fmt::Debug for Item {
 }
 
 /// Extra per-location state.
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug)]
 pub struct Stack {
     /// Used *mostly* as a stack; never empty.
     /// Invariants:
     /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
     /// * Except for `Untagged`, no tag occurs in the stack more than once.
     borrows: Vec<Item>,
+    /// We often want to know what (if any) tag in the stack exists for some `SbTag`.
+    /// The tag we're looking for does not occur often enough at the end of the stack for a linear
+    /// search to be efficient, so we use the uniqueness guarantee to keep a map from tag to
+    /// position in the stack.
+    cache: FxHashMap<PtrId, usize>,
+    /// `Untagged` may occur multiple times so we store it outside of the map.
+    untagged: Vec<usize>,
+    /// On a read, we need to disable all `Unique` above the granting item. We can avoid most of
+    /// this scan by keeping track of the region of the borrow stack that may contain `Unique`s.
+    first_unique: usize,
+    last_unique: usize,
+}
+
+impl PartialEq for Stack {
+    fn eq(&self, other: &Self) -> bool {
+        // All the semantics of Stack are in self.borrows, everything else is caching
+        self.borrows == other.borrows
+    }
 }
 
+impl Eq for Stack {}
+
 /// Extra per-allocation state.
 #[derive(Clone, Debug)]
 pub struct Stacks {
```
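
A note on the `derive` change above: `PartialEq` is now written by hand so that two stacks whose `borrows` agree compare equal even when their caches reached the same state along different paths. A sketch of why comparing cache fields would be wrong (hypothetical, simplified types):

```rust
/// Hypothetical, simplified stand-in: `last_unique` is pure caching,
/// like `cache`, `untagged`, and `first_unique` in the real `Stack`.
#[derive(Debug)]
struct Stack {
    borrows: Vec<u64>,
    last_unique: usize,
}

impl PartialEq for Stack {
    fn eq(&self, other: &Self) -> bool {
        // Only `borrows` carries semantics; comparing cache state would make
        // semantically identical stacks compare unequal.
        self.borrows == other.borrows
    }
}

impl Eq for Stack {}

fn main() {
    let a = Stack { borrows: vec![1, 2], last_unique: 0 };
    let b = Stack { borrows: vec![1, 2], last_unique: 1 };
    assert_ne!(a.last_unique, b.last_unique); // caches diverged...
    assert_eq!(a, b); // ...but a derived PartialEq returning false here would be wrong
}
```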
```diff
@@ -261,17 +281,23 @@ impl<'tcx> Stack {
     /// Find the item granting the given kind of access to the given tag, and return where
     /// it is on the stack.
     fn find_granting(&self, access: AccessKind, tag: SbTag) -> Option<usize> {
-        self.borrows
-            .iter()
-            .enumerate() // we also need to know *where* in the stack
-            .rev() // search top-to-bottom
-            // Return permission of first item that grants access.
-            // We require a permission with the right tag, ensuring U3 and F3.
-            .find_map(
-                |(idx, item)| {
-                    if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
-                },
-            )
+        match tag {
+            SbTag::Untagged => {
+                // Search top-to-bottom
+                for idx in self.untagged.iter().rev() {
+                    // Return permission of the first item that grants access.
+                    // We require a permission with the right tag, ensuring U3 and F3.
+                    if self.borrows[*idx].perm.grants(access) {
+                        return Some(*idx);
+                    }
+                }
+                return None;
+            }
+            SbTag::Tagged(id) =>
+                self.cache.get(&id).and_then(|idx| {
+                    if self.borrows[*idx].perm.grants(access) { Some(*idx) } else { None }
+                }),
+        }
     }
 
     /// Find the first write-incompatible item above the given one --
```
```diff
@@ -366,6 +392,23 @@ impl<'tcx> Stack {
             for item in self.borrows.drain(first_incompatible_idx..).rev() {
                 trace!("access: popping item {:?}", item);
                 Stack::check_protector(&item, Some((tag, access)), global)?;
+                // The drain removes elements from `borrows`, but we need to remove those elements
+                // from the lookup cache too.
+                if let SbTag::Tagged(id) = item.tag {
+                    assert!(self.cache.remove(&id).is_some());
+                }
+            }
+
+            while self.untagged.last() >= Some(&first_incompatible_idx) {
+                self.untagged.pop();
+            }
+            if first_incompatible_idx <= self.first_unique {
+                // We removed all the Unique items
+                self.first_unique = 0;
+                self.last_unique = 0;
+            } else {
+                // Ensure the range doesn't extend past the new top of the stack
+                self.last_unique = self.last_unique.min(first_incompatible_idx.saturating_sub(1));
             }
         } else {
             // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
```
```diff
@@ -376,14 +419,26 @@ impl<'tcx> Stack {
             // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
             // reference and use that.
             // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
-            for idx in ((granting_idx + 1)..self.borrows.len()).rev() {
-                let item = &mut self.borrows[idx];
-                if item.perm == Permission::Unique {
-                    trace!("access: disabling item {:?}", item);
-                    Stack::check_protector(item, Some((tag, access)), global)?;
-                    item.perm = Permission::Disabled;
+            if granting_idx < self.last_unique {
+                // add 1 so we don't disable the granting item
+                let lower = self.first_unique.max(granting_idx + 1);
+                //let upper = (self.last_unique + 1).min(self.borrows.len());
+                for item in &mut self.borrows[lower..=self.last_unique] {
+                    if item.perm == Permission::Unique {
+                        trace!("access: disabling item {:?}", item);
+                        Stack::check_protector(item, Some((tag, access)), global)?;
+                        item.perm = Permission::Disabled;
+                    }
                 }
             }
+            if granting_idx < self.first_unique {
+                // We disabled all Unique items
+                self.first_unique = 0;
+                self.last_unique = 0;
+            } else {
+                // Truncate the range to granting_idx
+                self.last_unique = self.last_unique.min(granting_idx);
+            }
         }
 
         // Done.
```
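
Both branches above maintain the possibly-`Unique` window `first_unique..=last_unique`. A standalone sketch of the read-path logic (hypothetical, simplified: a `bool` stands in for `perm == Permission::Unique`, and protectors are omitted):

```rust
/// Disable all "Unique" entries above `granting_idx`, scanning only the
/// window that may contain them, then shrink the window accordingly.
fn disable_uniques(unique: &mut [bool], granting_idx: usize, first: &mut usize, last: &mut usize) {
    if granting_idx < *last {
        // add 1 so we don't disable the granting item
        let lower = (*first).max(granting_idx + 1);
        for flag in &mut unique[lower..=*last] {
            *flag = false; // disable
        }
    }
    if granting_idx < *first {
        // Everything that was possibly Unique is now disabled.
        *first = 0;
        *last = 0;
    } else {
        // Nothing above granting_idx can still be Unique.
        *last = (*last).min(granting_idx);
    }
}

fn main() {
    // Stack of 6 items; positions 2..=4 may hold Unique permissions.
    let mut unique = [false, false, true, true, true, false];
    let (mut first, mut last) = (2, 4);
    disable_uniques(&mut unique, 3, &mut first, &mut last);
    // Only position 4 (above the granting item) was disabled, without
    // touching positions 5.. or 0..2.
    assert_eq!(unique, [false, false, true, true, false, false]);
    assert_eq!((first, last), (2, 3));
}
```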
```diff
@@ -411,6 +466,11 @@ impl<'tcx> Stack {
             Stack::check_protector(&item, None, global)?;
         }
 
+        self.cache.clear();
+        self.untagged.clear();
+        self.first_unique = 0;
+        self.last_unique = 0;
+
         Ok(())
     }
 
```
```diff
@@ -469,6 +529,43 @@ impl<'tcx> Stack {
         } else {
             trace!("reborrow: adding item {:?}", new);
             self.borrows.insert(new_idx, new);
+            // The above insert changes the meaning of every index in the cache >= new_idx, so now
+            // we need to find every one of those indexes and increment it.
+
+            // Adjust the possibly-unique range if an insert occurs before or within it
+            if self.first_unique >= new_idx {
+                self.first_unique += 1;
+            }
+            if self.last_unique >= new_idx {
+                self.last_unique += 1;
+            }
+            if new.perm == Permission::Unique {
+                // Make sure the possibly-unique range contains the new borrow
+                self.first_unique = self.first_unique.min(new_idx);
+                self.last_unique = self.last_unique.max(new_idx);
+            }
+            // Repair the lookup cache; indices have shifted if an insert was not at the end
+            if new_idx != self.borrows.len() - 1 {
+                for item in &self.borrows[new_idx + 1..] {
+                    if let SbTag::Tagged(id) = item.tag {
+                        *self.cache.get_mut(&id).unwrap() += 1;
+                    }
+                }
+                for idx in self.untagged.iter_mut().rev().take_while(|idx| **idx >= new_idx) {
+                    *idx += 1;
+                }
+            }
+            // We've now made a (conceptual) hole in the tag lookup cache.
+            // Nothing maps to new_idx, because we've adjusted everything >= it.
+            match new.tag {
+                SbTag::Untagged => {
+                    self.untagged.push(new_idx);
+                    self.untagged.sort();
+                }
+                SbTag::Tagged(id) => {
+                    self.cache.insert(id, new_idx);
+                }
+            }
         }
 
         Ok(())
```
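
The subtle step in this hunk is repairing the cache after a mid-stack insert: every cached index at or above `new_idx` now points one slot too low. A standalone sketch of the repair invariant (hypothetical, simplified; the real code walks only the shifted suffix of the stack, while this sketch visits every cache entry for brevity):

```rust
use std::collections::HashMap;

/// Insert `tag` at `new_idx` and repair the tag -> index cache.
fn insert_and_repair(
    borrows: &mut Vec<u64>,
    cache: &mut HashMap<u64, usize>,
    new_idx: usize,
    tag: u64,
) {
    borrows.insert(new_idx, tag);
    // Every index at or above the insertion point has shifted up by one.
    for idx in cache.values_mut() {
        if *idx >= new_idx {
            *idx += 1;
        }
    }
    // The conceptual hole at new_idx now belongs to the new tag.
    cache.insert(tag, new_idx);
}

fn main() {
    let mut borrows = vec![10, 20, 30];
    let mut cache: HashMap<u64, usize> =
        borrows.iter().enumerate().map(|(i, &t)| (t, i)).collect();
    insert_and_repair(&mut borrows, &mut cache, 1, 15);
    assert_eq!(borrows, vec![10, 15, 20, 30]);
    // The invariant holds: every cached index still points at its own tag.
    for (&tag, &idx) in &cache {
        assert_eq!(borrows[idx], tag);
    }
}
```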
```diff
@@ -547,7 +644,17 @@ impl<'tcx> Stacks {
     /// Creates new stack with initial tag.
     fn new(size: Size, perm: Permission, tag: SbTag) -> Self {
         let item = Item { perm, tag, protector: None };
-        let stack = Stack { borrows: vec![item] };
+        let mut cache = FxHashMap::default();
+        let mut untagged = Vec::new();
+        match item.tag {
+            SbTag::Untagged => {
+                untagged.push(0);
+            }
+            SbTag::Tagged(id) => {
+                cache.insert(id, 0);
+            }
+        }
+        let stack = Stack { borrows: vec![item], cache, untagged, first_unique: 0, last_unique: 0 };
 
         Stacks { stacks: RefCell::new(RangeMap::new(size, stack)) }
     }
```
