Update to nightly-2024-12-15 #144

Merged
12 commits merged on Jan 2, 2025
291 changes: 229 additions & 62 deletions Cargo.lock

Large diffs are not rendered by default.

8 changes: 3 additions & 5 deletions Cargo.toml
@@ -5,14 +5,12 @@ resolver = "2"

[workspace.dependencies]
serde = { version = "=1.0.149", features = ["derive"] }
rustc_plugin = "=0.12.0-nightly-2024-12-15"
rustc_utils = "=0.12.0-nightly-2024-12-15"

# Make snapshot testing faster
[profile.dev.package.insta]
opt-level = 3

[profile.dev.package.similar]
opt-level = 3

[patch.crates-io]
rustc_plugin = { git = "https://github.com/cognitive-engineering-lab/rustc_plugin", rev = "d4b3c43b0695d42030f9cb3a62fc27cc337019d1" }
rustc_utils = { git = "https://github.com/cognitive-engineering-lab/rustc_plugin", rev = "d4b3c43b0695d42030f9cb3a62fc27cc337019d1" }
opt-level = 3
2 changes: 1 addition & 1 deletion Makefile.toml
@@ -41,7 +41,7 @@ script = "cargo install --path crates/mdbook-aquascope --locked"
[tasks.playground-front]
script = """
export MIRI_SYSROOT=$(cargo miri setup --print-sysroot)
cd frontend && depot build -w
cd frontend && depot --no-fullscreen build --watch
"""

[tasks.playground.run_task]
6 changes: 3 additions & 3 deletions crates/aquascope/Cargo.toml
@@ -22,13 +22,13 @@ anyhow = "1.0.0"
log = "0.4"
itertools = "0.10.5"
serde = { workspace = true }
ts-rs = "6.2"
ts-rs = "7"
regex = "1"
fluid-let = "1.0"
rustc_utils = {version = "0.7.0-nightly-2023-08-25", features = ["graphviz", "ts-rs", "serde", "test"]}
rustc_utils = {workspace = true, features = ["graphviz", "ts-rs", "serde", "test"]}

# interpret module
miri = {git = "https://github.com/rust-lang/miri.git", rev = "63c5542edf907dd797db82c4c2979e3c4df71a8b"}
miri = {git = "https://github.com/rust-lang/miri", rev = "afdbb080fe4b8e73838fffdbea8b290aa246f3d7"}
aquascope_workspace_utils = { version = "0.3", path = "../aquascope_workspace_utils" }

# testing utils
31 changes: 15 additions & 16 deletions crates/aquascope/src/analysis/boundaries/mod.rs
@@ -404,9 +404,9 @@ fn select_candidate_location<'tcx>(
/// Return the constraints that occur nested within a [`HirId`].
///
/// Note, constraints involving regions belonging to the same SCC are removed.
fn flow_constraints_at_hir_id<'a, 'tcx: 'a>(
ctxt: &'a PermissionsCtxt<'a, 'tcx>,
ir_mapper: &'a IRMapper<'a, 'tcx>,
fn flow_constraints_at_hir_id<'tcx>(
ctxt: &PermissionsCtxt<'tcx>,
ir_mapper: &IRMapper<'tcx>,
hir_id: HirId,
) -> Option<Vec<(Origin, Origin, Point)>> {
let mir_locations =
@@ -543,10 +543,10 @@ fn get_flow_permission(
/// given HIR node. This builds our set of candidate places
/// that we consider for boundary resolution.
#[allow(clippy::wildcard_in_or_patterns)]
fn paths_at_hir_id<'a, 'tcx: 'a>(
fn paths_at_hir_id<'tcx>(
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
ir_mapper: &'a IRMapper<'a, 'tcx>,
body: &'tcx Body<'tcx>,
ir_mapper: &IRMapper<'tcx>,
hir_id: HirId,
) -> Option<Vec<(Location, Place<'tcx>)>> {
type TempBuff<'tcx> = SmallVec<[(Location, Place<'tcx>); 3]>;
@@ -581,7 +581,6 @@ fn paths_at_hir_id<'a, 'tcx: 'a>(

// Given place cases.
Rvalue::Ref(_, _, place)
| Rvalue::AddressOf(_, place)
| Rvalue::Len(place)
| Rvalue::Discriminant(place)
| Rvalue::CopyForDeref(place)
@@ -590,11 +589,10 @@ fn paths_at_hir_id<'a, 'tcx: 'a>(
smallvec![(loc, *place)]
}

// Two operand cases
Rvalue::BinaryOp(_, box (left_op, right_op))
| Rvalue::CheckedBinaryOp(_, box (left_op, right_op)) => {
maybe_in_op!(loc, left_op, right_op)
}
// Operand case
Rvalue::BinaryOp(_, box (left_op, right_op)) => {
maybe_in_op!(loc, left_op, right_op)
}

// Unimplemented cases, ignore nested information for now.
//
@@ -651,6 +649,7 @@ fn paths_at_hir_id<'a, 'tcx: 'a>(
| StatementKind::Coverage(..)
| StatementKind::Intrinsic(..)
| StatementKind::ConstEvalCounter
| StatementKind::BackwardIncompatibleDropHint { .. }
| StatementKind::Nop => smallvec![],
}
};
@@ -669,9 +668,9 @@ fn paths_at_hir_id<'a, 'tcx: 'a>(
Some(mir_locations)
}

fn path_to_perm_boundary<'a, 'tcx: 'a>(
fn path_to_perm_boundary<'tcx>(
path_boundary: PathBoundary,
analysis: &'a AquascopeAnalysis<'a, 'tcx>,
analysis: &AquascopeAnalysis<'tcx>,
) -> Option<PermissionsBoundary> {
let ctxt = &analysis.permissions;
let ir_mapper = &analysis.ir_mapper;
@@ -763,8 +762,8 @@ fn path_to_perm_boundary<'a, 'tcx: 'a>(
}

#[allow(clippy::module_name_repetitions)]
pub fn compute_permission_boundaries<'a, 'tcx: 'a>(
analysis: &AquascopeAnalysis<'a, 'tcx>,
pub fn compute_permission_boundaries<'tcx>(
analysis: &AquascopeAnalysis<'tcx>,
) -> Result<Vec<PermissionsBoundary>> {
let ctxt = &analysis.permissions;

28 changes: 14 additions & 14 deletions crates/aquascope/src/analysis/boundaries/path_visitor.rs
@@ -11,7 +11,7 @@ use rustc_middle::{
hir::nested_filter::OnlyBodies,
ty::{
adjustment::{Adjust, AutoBorrow},
ParamEnv, TyCtxt, TypeckResults,
TyCtxt, TypeckResults, TypingEnv,
},
};
use rustc_span::Span;
@@ -23,23 +23,23 @@ use crate::analysis::permissions::PermissionsCtxt;
// The current region flow context for outer statements and returns.
fluid_let!(pub static FLOW_CONTEXT: HirId);

struct HirExprScraper<'a, 'tcx: 'a> {
struct HirExprScraper<'tcx> {
tcx: TyCtxt<'tcx>,
typeck_res: &'a TypeckResults<'tcx>,
param_env: ParamEnv<'tcx>,
typeck_res: &'tcx TypeckResults<'tcx>,
typing_env: TypingEnv<'tcx>,
data: Vec<PathBoundary>,
unsupported_feature: Option<(Span, String)>,
}

impl<'a, 'tcx: 'a> HirExprScraper<'a, 'tcx> {
impl<'tcx> HirExprScraper<'tcx> {
fn get_adjusted_permissions(&self, expr: &Expr) -> ExpectedPermissions {
let ty_adj = self.typeck_res.expr_ty_adjusted(expr);
let adjs = self.typeck_res.expr_adjustments(expr);

log::debug!("Path TY-ADJ: {:#?} from {:#?}", ty_adj, adjs);

let is_auto_borrow = adjs.iter().find_map(|adj| {
if let Adjust::Borrow(AutoBorrow::Ref(_, m)) = adj.kind {
if let Adjust::Borrow(AutoBorrow::Ref(m)) = adj.kind {
Some(m)
} else {
None
@@ -53,15 +53,15 @@ impl<'a, 'tcx: 'a> HirExprScraper<'a, 'tcx> {
// At this point the usage is either a move or a copy. We
// can determine which by checking whether the type of the
// path is copyable.
if ty_adj.is_copyable(self.tcx, self.param_env) {
if ty_adj.is_copyable(self.tcx, self.typing_env) {
ExpectedPermissions::from_copy()
} else {
ExpectedPermissions::from_move()
}
}
}

impl<'a, 'tcx: 'a> Visitor<'tcx> for HirExprScraper<'a, 'tcx> {
impl<'tcx> Visitor<'tcx> for HirExprScraper<'tcx> {
type NestedFilter = OnlyBodies;

fn nested_visit_map(&mut self) -> Self::Map {
@@ -71,12 +71,12 @@ impl<'a, 'tcx: 'a> Visitor<'tcx> for HirExprScraper<'a, 'tcx> {
// Visiting statements / body is only used for specifying a
// region flow context. This would not be used for RWO
// path boundaries.
fn visit_body(&mut self, body: &'tcx Body) {
fn visit_body(&mut self, body: &Body<'tcx>) {
fluid_set!(FLOW_CONTEXT, &body.value.hir_id);
intravisit::walk_body(self, body);
}

fn visit_stmt(&mut self, stmt: &'tcx Stmt) {
fn visit_stmt(&mut self, stmt: &'tcx Stmt<'tcx>) {
fluid_set!(FLOW_CONTEXT, &stmt.hir_id);
intravisit::walk_stmt(self, stmt);
}
@@ -255,16 +255,16 @@ impl<'a, 'tcx: 'a> Visitor<'tcx> for HirExprScraper<'a, 'tcx> {
}
}

pub(super) fn get_path_boundaries<'a, 'tcx: 'a>(
ctxt: &'a PermissionsCtxt<'a, 'tcx>,
pub(super) fn get_path_boundaries<'tcx>(
ctxt: &PermissionsCtxt<'tcx>,
) -> Result<Vec<PathBoundary>> {
let tcx = ctxt.tcx;
let body_id = ctxt.body_id;
let typeck_res = tcx.typeck_body(ctxt.body_id);
let param_env = ctxt.param_env;
let typing_env = ctxt.typing_env;
let mut finder = HirExprScraper {
tcx,
param_env,
typing_env,
typeck_res,
unsupported_feature: None,
data: Vec::default(),
6 changes: 3 additions & 3 deletions crates/aquascope/src/analysis/find_bindings.rs
@@ -1,15 +1,15 @@
use rustc_data_structures::fx::FxHashMap as HashMap;
use rustc_hir::{
intravisit::{self, Visitor},
BindingAnnotation, HirId, Pat, PatKind,
BindingMode, HirId, Pat, PatKind,
};
// use rustc_hir_analysis;
use rustc_middle::{hir::nested_filter::OnlyBodies, ty::TyCtxt};

struct BindingFinder<'tcx> {
tcx: TyCtxt<'tcx>,
// Mapping a HirId (identifier) to its binding annotation.
bindings: HashMap<HirId, BindingAnnotation>,
bindings: HashMap<HirId, BindingMode>,
}

impl<'tcx> Visitor<'tcx> for BindingFinder<'tcx> {
@@ -29,7 +29,7 @@ impl<'tcx> Visitor<'tcx> for BindingFinder<'tcx> {
}
}

pub fn find_bindings(tcx: TyCtxt) -> HashMap<HirId, BindingAnnotation> {
pub fn find_bindings(tcx: TyCtxt) -> HashMap<HirId, BindingMode> {
let mut finder = BindingFinder {
tcx,
bindings: HashMap::default(),
52 changes: 16 additions & 36 deletions crates/aquascope/src/analysis/ir_mapper/body_graph.rs
@@ -1,4 +1,4 @@
use rustc_data_structures::{captures::Captures, graph::*};
use rustc_data_structures::graph::*;
use rustc_middle::mir::{
BasicBlock, BasicBlockData, BasicBlocks, Body, Location, Terminator,
TerminatorKind,
@@ -11,11 +11,11 @@ use smallvec::SmallVec;
/// regular control-flow. This removes cleanup blocks or those which
/// fall in unwind paths. When mapping back to source-level constructs
/// this is almost certainly what you want to use.
pub(crate) struct CleanedBody<'a, 'tcx: 'a>(pub &'a Body<'tcx>);
pub(crate) struct CleanedBody<'tcx>(pub &'tcx Body<'tcx>);

#[allow(dead_code)]
impl<'a, 'tcx: 'a> CleanedBody<'a, 'tcx> {
pub fn body(&self) -> &'a Body<'tcx> {
impl<'tcx> CleanedBody<'tcx> {
pub fn body(&self) -> &'tcx Body<'tcx> {
self.0
}

@@ -57,9 +57,7 @@ impl<'a, 'tcx: 'a> CleanedBody<'a, 'tcx> {
self.body().basic_blocks[block].terminator()
}

pub fn blocks(
&self,
) -> impl Iterator<Item = BasicBlock> + Captures<'a> + Captures<'tcx> + '_ {
pub fn blocks(&self) -> impl Iterator<Item = BasicBlock> + use<'tcx, '_> {
self
.0
.basic_blocks
@@ -99,33 +97,23 @@ impl<'a, 'tcx: 'a> CleanedBody<'a, 'tcx> {
// -----------
// Graph impls

impl DirectedGraph for CleanedBody<'_, '_> {
impl DirectedGraph for CleanedBody<'_> {
type Node = BasicBlock;
}

impl WithStartNode for CleanedBody<'_, '_> {
fn start_node(&self) -> Self::Node {
self.0.basic_blocks.start_node()
}
}

impl<'tcx> WithNumNodes for CleanedBody<'_, 'tcx> {
fn num_nodes(&self) -> usize {
self.0.basic_blocks.len()
}
}

impl<'tcx> GraphSuccessors<'_> for CleanedBody<'_, 'tcx> {
type Item = BasicBlock;
type Iter = smallvec::IntoIter<[BasicBlock; 4]>;
impl StartNode for CleanedBody<'_> {
fn start_node(&self) -> Self::Node {
self.0.basic_blocks.start_node()
}
}

impl<'tcx> WithSuccessors for CleanedBody<'_, 'tcx> {
fn successors(
&self,
node: Self::Node,
) -> <Self as GraphSuccessors<'_>>::Iter {
<BasicBlocks as WithSuccessors>::successors(&self.0.basic_blocks, node)
impl<'tcx> Successors for CleanedBody<'tcx> {
fn successors(&self, node: Self::Node) -> impl Iterator<Item = Self::Node> {
<BasicBlocks as Successors>::successors(&self.0.basic_blocks, node)
.filter(|bb| {
let from_data = &self.0.basic_blocks[*bb];
CleanedBody::keep_block(from_data)
@@ -136,17 +124,9 @@ impl<'tcx> WithSuccessors for CleanedBody<'_, 'tcx> {
}
}

impl<'tcx> GraphPredecessors<'_> for CleanedBody<'_, 'tcx> {
type Item = BasicBlock;
type Iter = smallvec::IntoIter<[BasicBlock; 4]>;
}

impl<'tcx> WithPredecessors for CleanedBody<'_, 'tcx> {
fn predecessors(
&self,
node: Self::Node,
) -> <Self as GraphSuccessors<'_>>::Iter {
<BasicBlocks as WithPredecessors>::predecessors(&self.0.basic_blocks, node)
impl<'tcx> Predecessors for CleanedBody<'tcx> {
fn predecessors(&self, node: Self::Node) -> impl Iterator<Item = Self::Node> {
<BasicBlocks as Predecessors>::predecessors(&self.0.basic_blocks, node)
.filter(|bb| CleanedBody::keep_block(&self.0.basic_blocks[*bb]))
.collect::<SmallVec<[BasicBlock; 4]>>()
.into_iter()