diff --git a/Cargo.lock b/Cargo.lock
index caa8f28d8e2f..789cc3d6816c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2610,6 +2610,8 @@ dependencies = [
  "nohash-hasher",
  "paths",
  "rustc-hash 2.1.1",
+ "salsa",
+ "salsa-macros",
  "stdx",
  "tracing",
 ]
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index 6766748097f0..22c4c1865a4a 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -16,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
     let tt = syntax_node_to_token_tree(
         tt.syntax(),
         DummyTestSpanMap,
-        DUMMY,
+        *DUMMY,
         DocCommentDesugarMode::ProcMacro,
     );
     let cfg = CfgExpr::parse(&tt);
@@ -29,7 +29,7 @@ fn check_dnf(input: &str, expect: Expect) {
     let tt = syntax_node_to_token_tree(
         tt.syntax(),
         DummyTestSpanMap,
-        DUMMY,
+        *DUMMY,
         DocCommentDesugarMode::ProcMacro,
     );
     let cfg = CfgExpr::parse(&tt);
@@ -43,7 +43,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let tt = syntax_node_to_token_tree(
         tt.syntax(),
         DummyTestSpanMap,
-        DUMMY,
+        *DUMMY,
         DocCommentDesugarMode::ProcMacro,
     );
     let cfg = CfgExpr::parse(&tt);
@@ -59,7 +59,7 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let tt = syntax_node_to_token_tree(
         tt.syntax(),
         DummyTestSpanMap,
-        DUMMY,
+        *DUMMY,
         DocCommentDesugarMode::ProcMacro,
     );
     let cfg = CfgExpr::parse(&tt);
diff --git a/crates/hir-expand/src/builtin/quote.rs b/crates/hir-expand/src/builtin/quote.rs
index d5874f829ba5..637c3dcbec10 100644
--- a/crates/hir-expand/src/builtin/quote.rs
+++ b/crates/hir-expand/src/builtin/quote.rs
@@ -222,6 +222,8 @@ impl_to_to_tokentrees! {
 
 #[cfg(test)]
 mod tests {
+    use std::sync::LazyLock;
+
     use crate::tt;
     use ::tt::IdentIsRaw;
     use expect_test::expect;
@@ -231,7 +233,7 @@ mod tests {
 
     use super::quote;
 
-    const DUMMY: tt::Span = tt::Span {
+    static DUMMY: LazyLock<tt::Span> = LazyLock::new(|| tt::Span {
         range: TextRange::empty(TextSize::new(0)),
         anchor: SpanAnchor {
             file_id: span::EditionedFileId::new(
@@ -241,39 +243,43 @@ mod tests {
             ast_id: ROOT_ERASED_FILE_AST_ID,
         },
         ctx: SyntaxContext::root(Edition::CURRENT),
-    };
+    });
 
     #[test]
     fn test_quote_delimiters() {
-        assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
-        assert_eq!(quote!(DUMMY =>()).to_string(), "()");
-        assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
+        let dummy = *DUMMY;
+        assert_eq!(quote!(dummy =>{}).to_string(), "{}");
+        assert_eq!(quote!(dummy =>()).to_string(), "()");
+        assert_eq!(quote!(dummy =>[]).to_string(), "[]");
     }
 
     #[test]
     fn test_quote_idents() {
-        assert_eq!(quote!(DUMMY =>32).to_string(), "32");
-        assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
+        let dummy = *DUMMY;
+        assert_eq!(quote!(dummy =>32).to_string(), "32");
+        assert_eq!(quote!(dummy =>struct).to_string(), "struct");
     }
 
     #[test]
     fn test_quote_hash_simple_literal() {
         let a = 20;
-        assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
+        let dummy = *DUMMY;
+        assert_eq!(quote!(dummy =>#a).to_string(), "20");
         let s: String = "hello".into();
-        assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
+        assert_eq!(quote!(dummy =>#s).to_string(), "\"hello\"");
     }
 
     fn mk_ident(name: &str) -> crate::tt::Ident {
         let (is_raw, s) = IdentIsRaw::split_from_symbol(name);
-        crate::tt::Ident { sym: Symbol::intern(s), span: DUMMY, is_raw }
+        crate::tt::Ident { sym: Symbol::intern(s), span: *DUMMY, is_raw }
     }
 
     #[test]
     fn test_quote_hash_token_tree() {
         let a = mk_ident("hello");
+        let dummy = *DUMMY;
 
-        let quoted = quote!(DUMMY =>#a);
+        let quoted = quote!(dummy =>#a);
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:#?}");
         expect![[r#"
@@ -286,7 +292,8 @@ mod tests {
     fn test_quote_simple_derive_copy() {
         let name = mk_ident("Foo");
 
-        let quoted = quote! {DUMMY =>
+        let dummy = *DUMMY;
+        let quoted = quote! {dummy =>
             impl Clone for #name {
                 fn clone(&self) -> Self {
                     Self {}
@@ -304,19 +311,20 @@ mod tests {
         //     name: String,
         //     id: u32,
         // }
+        let dummy = *DUMMY;
         let struct_name = mk_ident("Foo");
         let fields = [mk_ident("name"), mk_ident("id")];
-        let fields = fields.iter().map(|it| quote!(DUMMY =>#it: self.#it.clone(), ));
+        let fields = fields.iter().map(|it| quote!(dummy =>#it: self.#it.clone(), ));
 
         let mut builder = tt::TopSubtreeBuilder::new(crate::tt::Delimiter {
             kind: crate::tt::DelimiterKind::Brace,
-            open: DUMMY,
-            close: DUMMY,
+            open: dummy,
+            close: dummy,
         });
         fields.for_each(|field| builder.extend_with_tt(field.view().as_token_trees()));
         let list = builder.build();
 
-        let quoted = quote! {DUMMY =>
+        let quoted = quote! {dummy =>
             impl Clone for #struct_name {
                 fn clone(&self) -> Self {
                     Self #list
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index c6a323d40815..1516b301717d 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -1718,9 +1718,9 @@ pub use level1::Foo;
         expect![[r#"
             Foo Struct FileId(0) 0..15 11..14
 
+            FileId(3) 16..19 import
             FileId(1) 16..19 import
             FileId(2) 16..19 import
-            FileId(3) 16..19 import
         "#]],
     );
 }
@@ -1748,9 +1748,9 @@ lib::foo!();
         expect![[r#"
             foo Macro FileId(1) 0..61 29..32
 
+            FileId(3) 5..8
             FileId(0) 46..49 import
             FileId(2) 0..3
-            FileId(3) 5..8
         "#]],
     );
 }
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index 04ac85ad43dd..eb68365a2f5c 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -52,7 +52,7 @@ fn benchmark_expand_macro_rules() {
         invocations
             .into_iter()
             .map(|(id, tt)| {
-                let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
+                let res = rules[&id].expand(&tt, |_| (), *DUMMY, Edition::CURRENT);
                 assert!(res.err.is_none());
                 res.value.0.0.len()
             })
@@ -82,7 +82,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
             let def_tt = syntax_node_to_token_tree(
                 rule.token_tree().unwrap().syntax(),
                 DummyTestSpanMap,
-                DUMMY,
+                *DUMMY,
                 DocCommentDesugarMode::Mbe,
             );
             (id, def_tt)
@@ -114,8 +114,8 @@ fn invocation_fixtures(
         let mut try_cnt = 0;
         loop {
             let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter {
-                open: DUMMY,
-                close: DUMMY,
+                open: *DUMMY,
+                close: *DUMMY,
                 kind: tt::DelimiterKind::Invisible,
             });
             for op in rule.lhs.iter() {
@@ -123,7 +123,7 @@ fn invocation_fixtures(
             }
             let subtree = builder.build();
 
-            if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() {
+            if it.expand(&subtree, |_| (), *DUMMY, Edition::CURRENT).err.is_none() {
                 res.push((name.clone(), subtree));
                 break;
             }
@@ -227,25 +227,25 @@ fn invocation_fixtures(
         }
         fn make_ident(ident: &str) -> tt::Leaf {
             tt::Leaf::Ident(tt::Ident {
-                span: DUMMY,
+                span: *DUMMY,
                 sym: Symbol::intern(ident),
                 is_raw: tt::IdentIsRaw::No,
             })
         }
         fn make_punct(char: char) -> tt::Leaf {
-            tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone })
+            tt::Leaf::Punct(tt::Punct { span: *DUMMY, char, spacing: tt::Spacing::Alone })
         }
         fn make_literal(lit: &str) -> tt::Leaf {
             tt::Leaf::Literal(tt::Literal {
-                span: DUMMY,
+                span: *DUMMY,
                 symbol: Symbol::intern(lit),
                 kind: tt::LitKind::Str,
                 suffix: None,
             })
         }
         fn make_subtree(kind: tt::DelimiterKind, builder: &mut tt::TopSubtreeBuilder) {
-            builder.open(kind, DUMMY);
-            builder.close(DUMMY);
+            builder.open(kind, *DUMMY);
+            builder.close(*DUMMY);
         }
     }
 }
diff --git a/crates/rust-analyzer/src/target_spec.rs b/crates/rust-analyzer/src/target_spec.rs
index 7132e09146eb..b957be589ed5 100644
--- a/crates/rust-analyzer/src/target_spec.rs
+++ b/crates/rust-analyzer/src/target_spec.rs
@@ -280,7 +280,7 @@ mod tests {
         let tt = syntax_node_to_token_tree(
             tt.syntax(),
             &DummyTestSpanMap,
-            DUMMY,
+            *DUMMY,
             DocCommentDesugarMode::Mbe,
         );
         CfgExpr::parse(&tt)
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index b81d08eed6d8..c0672975dafd 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -158,11 +158,11 @@ impl EditionedFileId {
         self.0
     }
 
-    pub const fn file_id(self) -> FileId {
+    pub fn file_id(self) -> FileId {
         FileId::from_raw(self.0 & Self::FILE_ID_MASK)
     }
 
-    pub const fn unpack(self) -> (FileId, Edition) {
+    pub fn unpack(self) -> (FileId, Edition) {
         (self.file_id(), self.edition())
     }
 
diff --git a/crates/syntax-bridge/src/lib.rs b/crates/syntax-bridge/src/lib.rs
index d59229952f52..665abacbfa7e 100644
--- a/crates/syntax-bridge/src/lib.rs
+++ b/crates/syntax-bridge/src/lib.rs
@@ -46,11 +46,13 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
 
 /// Dummy things for testing where spans don't matter.
 pub mod dummy_test_span_utils {
+    use std::sync::LazyLock;
+
     use span::{Span, SyntaxContext};
 
     use super::*;
 
-    pub const DUMMY: Span = Span {
+    pub static DUMMY: LazyLock<Span> = LazyLock::new(|| Span {
         range: TextRange::empty(TextSize::new(0)),
         anchor: span::SpanAnchor {
             file_id: span::EditionedFileId::new(
@@ -60,7 +62,7 @@ pub mod dummy_test_span_utils {
             ast_id: span::ROOT_ERASED_FILE_AST_ID,
         },
         ctx: SyntaxContext::root(Edition::CURRENT),
-    };
+    });
 
     pub struct DummyTestSpanMap;
 
diff --git a/crates/syntax-bridge/src/tests.rs b/crates/syntax-bridge/src/tests.rs
index 8871bf56a5df..07342cec8045 100644
--- a/crates/syntax-bridge/src/tests.rs
+++ b/crates/syntax-bridge/src/tests.rs
@@ -15,7 +15,7 @@ fn check_punct_spacing(fixture: &str) {
     let subtree = syntax_node_to_token_tree(
         source_file.syntax(),
         DummyTestSpanMap,
-        DUMMY,
+        *DUMMY,
         DocCommentDesugarMode::Mbe,
     );
     let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml
index e8a6195036ed..d64e7d4b3f7c 100644
--- a/crates/vfs/Cargo.toml
+++ b/crates/vfs/Cargo.toml
@@ -19,6 +19,8 @@ fst = "0.4.7"
 indexmap.workspace = true
 nohash-hasher.workspace = true
 crossbeam-channel.workspace = true
+salsa.workspace = true
+salsa-macros.workspace = true
 
 paths.workspace = true
 stdx.workspace = true
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index 50e388d78002..cf3c8ba4dcf7 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -62,22 +62,43 @@ use tracing::{Level, span};
 /// Handle to a file in [`Vfs`]
 ///
 /// Most functions in rust-analyzer use this when they need to refer to a file.
-#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
-pub struct FileId(u32);
-// pub struct FileId(NonMaxU32);
+#[salsa_macros::input]
+pub struct FileId {
+    path: VfsPath,
+}
+
+impl PartialOrd for FileId {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for FileId {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.0.as_u32().cmp(&other.0.as_u32())
+    }
+}
+
+impl fmt::Debug for FileId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_tuple("FileId").field(&self.0.as_u32()).finish()
+    }
+}
 
 impl FileId {
     const MAX: u32 = 0x7fff_ffff;
 
     #[inline]
-    pub const fn from_raw(raw: u32) -> FileId {
+    pub fn from_raw(raw: u32) -> FileId {
+        use salsa::plumbing::FromId;
         assert!(raw <= Self::MAX);
-        FileId(raw)
+        let id = unsafe { salsa::Id::from_u32(raw) };
+        FileId::from_id(id)
     }
 
     #[inline]
     pub const fn index(self) -> u32 {
-        self.0
+        self.0.as_u32()
     }
 }
 
@@ -199,7 +220,7 @@ impl Vfs {
     /// This will skip deleted files.
     pub fn iter(&self) -> impl Iterator<Item = (FileId, &VfsPath)> + '_ {
         (0..self.data.len())
-            .map(|it| FileId(it as u32))
+            .map(|it| FileId::from_raw(it as u32))
             .filter(move |&file_id| matches!(self.get(file_id), FileState::Exists(_)))
             .map(move |file_id| {
                 let path = self.interner.lookup(file_id);
@@ -235,7 +256,7 @@ impl Vfs {
         };
 
         let mut set_data = |change_kind| {
-            self.data[file_id.0 as usize] = match change_kind {
+            self.data[file_id.0.as_u32() as usize] = match change_kind {
                 &Change::Create(_, hash) | &Change::Modify(_, hash) => FileState::Exists(hash),
                 Change::Delete => FileState::Deleted,
             };
@@ -299,7 +320,7 @@ impl Vfs {
     /// Does not record a change.
     fn alloc_file_id(&mut self, path: VfsPath) -> FileId {
         let file_id = self.interner.intern(path);
-        let idx = file_id.0 as usize;
+        let idx = file_id.0.as_u32() as usize;
         let len = self.data.len().max(idx + 1);
         self.data.resize(len, FileState::Deleted);
         file_id
@@ -311,14 +332,14 @@ impl Vfs {
     ///
     /// Panics if no file is associated to that id.
     fn get(&self, file_id: FileId) -> FileState {
-        self.data[file_id.0 as usize]
+        self.data[file_id.0.as_u32() as usize]
     }
 
     /// We cannot ignore excluded files, because this will lead to errors when the client
     /// requests semantic information for them, so we instead mark them specially.
     pub fn insert_excluded_file(&mut self, path: VfsPath) {
         let file_id = self.alloc_file_id(path);
-        self.data[file_id.0 as usize] = FileState::Excluded;
+        self.data[file_id.0.as_u32() as usize] = FileState::Excluded;
     }
 }
 
diff --git a/crates/vfs/src/path_interner.rs b/crates/vfs/src/path_interner.rs
index 64f51976053d..80bee0dc71e6 100644
--- a/crates/vfs/src/path_interner.rs
+++ b/crates/vfs/src/path_interner.rs
@@ -19,7 +19,7 @@ impl PathInterner {
     ///
     /// If `path` does not exists in `self`, returns [`None`].
     pub(crate) fn get(&self, path: &VfsPath) -> Option<FileId> {
-        self.map.get_index_of(path).map(|i| FileId(i as u32))
+        self.map.get_index_of(path).map(|i| FileId::from_raw(i as u32))
     }
 
     /// Insert `path` in `self`.
@@ -29,7 +29,7 @@ impl PathInterner {
     pub(crate) fn intern(&mut self, path: VfsPath) -> FileId {
         let (id, _added) = self.map.insert_full(path);
         assert!(id < u32::MAX as usize);
-        FileId(id as u32)
+        FileId::from_raw(id as u32)
     }
 
     /// Returns the path corresponding to `id`.
@@ -38,6 +38,6 @@ impl PathInterner {
     ///
     /// Panics if `id` does not exists in `self`.
     pub(crate) fn lookup(&self, id: FileId) -> &VfsPath {
-        self.map.get_index(id.0 as usize).unwrap()
+        self.map.get_index(id.0.as_u32() as usize).unwrap()
     }
 }
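Usage sketch (not part of the patch): the code below is a minimal illustration of the caller-side migration this diff performs, assuming the patched `vfs` crate is available as a dependency; `dense_file_ids` and `main` are illustrative names, not items from the diff. With `FileId` turned into a `#[salsa_macros::input]`, tuple construction (`FileId(n)`) and direct field reads (`file_id.0`) no longer exist; ids are built with `FileId::from_raw` and their raw index is read back with `index()`, mirroring what `Vfs::iter` and `PathInterner` do above.

// Sketch only: assumes the patched `vfs` crate from this diff is a dependency.
use vfs::FileId;

// Illustrative helper mirroring how `Vfs::iter` now builds ids for dense indices.
fn dense_file_ids(len: usize) -> impl Iterator<Item = FileId> {
    // `from_raw` asserts the value is at most `FileId::MAX` and wraps it in a `salsa::Id`.
    (0..len).map(|it| FileId::from_raw(it as u32))
}

fn main() {
    for (expected, file_id) in dense_file_ids(3).enumerate() {
        // `index()` replaces the old `file_id.0` reads; the raw u32 round-trips,
        // which the interner-indexed `self.data[...]` lookups in the patch rely on.
        assert_eq!(file_id.index(), expected as u32);
        // Uses the manual `fmt::Debug` impl added in the diff.
        println!("{file_id:?}");
    }
}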