
internal: start porting VFS to Salsa #20102


Open · wants to merge 2 commits into master
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

8 changes: 4 additions & 4 deletions crates/cfg/src/tests.rs
@@ -16,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
let tt = syntax_node_to_token_tree(
tt.syntax(),
DummyTestSpanMap,
DUMMY,
*DUMMY,
DocCommentDesugarMode::ProcMacro,
);
let cfg = CfgExpr::parse(&tt);
@@ -29,7 +29,7 @@ fn check_dnf(input: &str, expect: Expect) {
let tt = syntax_node_to_token_tree(
tt.syntax(),
DummyTestSpanMap,
DUMMY,
*DUMMY,
DocCommentDesugarMode::ProcMacro,
);
let cfg = CfgExpr::parse(&tt);
@@ -43,7 +43,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let tt = syntax_node_to_token_tree(
tt.syntax(),
DummyTestSpanMap,
DUMMY,
*DUMMY,
DocCommentDesugarMode::ProcMacro,
);
let cfg = CfgExpr::parse(&tt);
@@ -59,7 +59,7 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let tt = syntax_node_to_token_tree(
tt.syntax(),
DummyTestSpanMap,
DUMMY,
*DUMMY,
DocCommentDesugarMode::ProcMacro,
);
let cfg = CfgExpr::parse(&tt);
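The change repeated across these test files is mechanical: `DUMMY` used to be a `const Span`, and this PR turns it into a lazily initialised `static` (see the quote.rs and syntax-bridge diffs below), so every call site now copies the value out with `*DUMMY`. A minimal sketch of the pattern, with a made-up `Span` type standing in for the real one:

```rust
use std::sync::LazyLock;

// Stand-in for rust-analyzer's real `Span`; the only property that matters
// here is that it is `Copy`, so `*DUMMY` is a cheap copy of the value.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);

// Before: `const DUMMY: Span = Span(0);` — built at compile time.
// After: a static initialised on first access. This becomes necessary once
// `Span` holds data that can no longer be produced in a `const` context
// (for example, values that will eventually come from a Salsa database).
static DUMMY: LazyLock<Span> = LazyLock::new(|| Span(0));

fn main() {
    // Call sites simply dereference the lazy static.
    let span = *DUMMY;
    assert_eq!(span, Span(0));
}
```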
40 changes: 24 additions & 16 deletions crates/hir-expand/src/builtin/quote.rs
@@ -222,6 +222,8 @@ impl_to_to_tokentrees! {

#[cfg(test)]
mod tests {
use std::sync::LazyLock;

use crate::tt;
use ::tt::IdentIsRaw;
use expect_test::expect;
@@ -231,7 +233,7 @@ mod tests {

use super::quote;

const DUMMY: tt::Span = tt::Span {
static DUMMY: LazyLock<tt::Span> = LazyLock::new(|| tt::Span {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor {
file_id: span::EditionedFileId::new(
@@ -241,39 +243,43 @@
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContext::root(Edition::CURRENT),
};
});

#[test]
fn test_quote_delimiters() {
assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
assert_eq!(quote!(DUMMY =>()).to_string(), "()");
assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
let dummy = *DUMMY;
assert_eq!(quote!(dummy =>{}).to_string(), "{}");
assert_eq!(quote!(dummy =>()).to_string(), "()");
assert_eq!(quote!(dummy =>[]).to_string(), "[]");
}

#[test]
fn test_quote_idents() {
assert_eq!(quote!(DUMMY =>32).to_string(), "32");
assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
let dummy = *DUMMY;
assert_eq!(quote!(dummy =>32).to_string(), "32");
assert_eq!(quote!(dummy =>struct).to_string(), "struct");
}

#[test]
fn test_quote_hash_simple_literal() {
let a = 20;
assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
let dummy = *DUMMY;
assert_eq!(quote!(dummy =>#a).to_string(), "20");
let s: String = "hello".into();
assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
assert_eq!(quote!(dummy =>#s).to_string(), "\"hello\"");
}

fn mk_ident(name: &str) -> crate::tt::Ident {
let (is_raw, s) = IdentIsRaw::split_from_symbol(name);
crate::tt::Ident { sym: Symbol::intern(s), span: DUMMY, is_raw }
crate::tt::Ident { sym: Symbol::intern(s), span: *DUMMY, is_raw }
}

#[test]
fn test_quote_hash_token_tree() {
let a = mk_ident("hello");
let dummy = *DUMMY;

let quoted = quote!(DUMMY =>#a);
let quoted = quote!(dummy =>#a);
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:#?}");
expect![[r#"
@@ -286,7 +292,8 @@
fn test_quote_simple_derive_copy() {
let name = mk_ident("Foo");

let quoted = quote! {DUMMY =>
let dummy = *DUMMY;
let quoted = quote! {dummy =>
impl Clone for #name {
fn clone(&self) -> Self {
Self {}
@@ -304,19 +311,20 @@
// name: String,
// id: u32,
// }
let dummy = *DUMMY;
let struct_name = mk_ident("Foo");
let fields = [mk_ident("name"), mk_ident("id")];
let fields = fields.iter().map(|it| quote!(DUMMY =>#it: self.#it.clone(), ));
let fields = fields.iter().map(|it| quote!(dummy =>#it: self.#it.clone(), ));

let mut builder = tt::TopSubtreeBuilder::new(crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::Brace,
open: DUMMY,
close: DUMMY,
open: dummy,
close: dummy,
});
fields.for_each(|field| builder.extend_with_tt(field.view().as_token_trees()));
let list = builder.build();

let quoted = quote! {DUMMY =>
let quoted = quote! {dummy =>
impl Clone for #struct_name {
fn clone(&self) -> Self {
Self #list
4 changes: 2 additions & 2 deletions crates/ide/src/references.rs
@@ -1718,9 +1718,9 @@ pub use level1::Foo;
expect![[r#"
Foo Struct FileId(0) 0..15 11..14

FileId(3) 16..19 import
FileId(1) 16..19 import
FileId(2) 16..19 import
FileId(3) 16..19 import
"#]],
);
}
@@ -1748,9 +1748,9 @@ lib::foo!();
expect![[r#"
foo Macro FileId(1) 0..61 29..32

FileId(3) 5..8
FileId(0) 46..49 import
FileId(2) 0..3
FileId(3) 5..8
"#]],
);
}
20 changes: 10 additions & 10 deletions crates/mbe/src/benchmark.rs
@@ -52,7 +52,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
let res = rules[&id].expand(&tt, |_| (), *DUMMY, Edition::CURRENT);
assert!(res.err.is_none());
res.value.0.0.len()
})
@@ -82,7 +82,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
let def_tt = syntax_node_to_token_tree(
rule.token_tree().unwrap().syntax(),
DummyTestSpanMap,
DUMMY,
*DUMMY,
DocCommentDesugarMode::Mbe,
);
(id, def_tt)
@@ -114,16 +114,16 @@ fn invocation_fixtures(
let mut try_cnt = 0;
loop {
let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter {
open: DUMMY,
close: DUMMY,
open: *DUMMY,
close: *DUMMY,
kind: tt::DelimiterKind::Invisible,
});
for op in rule.lhs.iter() {
collect_from_op(op, &mut builder, &mut seed);
}
let subtree = builder.build();

if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() {
if it.expand(&subtree, |_| (), *DUMMY, Edition::CURRENT).err.is_none() {
res.push((name.clone(), subtree));
break;
}
@@ -227,25 +227,25 @@ fn invocation_fixtures(
}
fn make_ident(ident: &str) -> tt::Leaf<Span> {
tt::Leaf::Ident(tt::Ident {
span: DUMMY,
span: *DUMMY,
sym: Symbol::intern(ident),
is_raw: tt::IdentIsRaw::No,
})
}
fn make_punct(char: char) -> tt::Leaf<Span> {
tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone })
tt::Leaf::Punct(tt::Punct { span: *DUMMY, char, spacing: tt::Spacing::Alone })
}
fn make_literal(lit: &str) -> tt::Leaf<Span> {
tt::Leaf::Literal(tt::Literal {
span: DUMMY,
span: *DUMMY,
symbol: Symbol::intern(lit),
kind: tt::LitKind::Str,
suffix: None,
})
}
fn make_subtree(kind: tt::DelimiterKind, builder: &mut tt::TopSubtreeBuilder<Span>) {
builder.open(kind, DUMMY);
builder.close(DUMMY);
builder.open(kind, *DUMMY);
builder.close(*DUMMY);
}
}
}
2 changes: 1 addition & 1 deletion crates/rust-analyzer/src/target_spec.rs
@@ -280,7 +280,7 @@ mod tests {
let tt = syntax_node_to_token_tree(
tt.syntax(),
&DummyTestSpanMap,
DUMMY,
*DUMMY,
DocCommentDesugarMode::Mbe,
);
CfgExpr::parse(&tt)
4 changes: 2 additions & 2 deletions crates/span/src/lib.rs
@@ -158,11 +158,11 @@ impl EditionedFileId {
self.0
}

pub const fn file_id(self) -> FileId {
pub fn file_id(self) -> FileId {
FileId::from_raw(self.0 & Self::FILE_ID_MASK)
}

pub const fn unpack(self) -> (FileId, Edition) {
pub fn unpack(self) -> (FileId, Edition) {
(self.file_id(), self.edition())
}

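Dropping `const` from `file_id` and `unpack` is presumably forced by the same migration: once `FileId::from_raw` (or anything these accessors call) stops being a `const fn`, every `const fn` built on top of it has to lose the qualifier too. A hypothetical illustration, with invented types and mask value rather than the real crate's:

```rust
struct FileId(u32);

impl FileId {
    // Imagine this constructor is no longer `const` because it now goes
    // through a database/interner rather than plain bit twiddling.
    fn from_raw(raw: u32) -> FileId {
        FileId(raw)
    }
}

struct EditionedFileId(u32);

impl EditionedFileId {
    // Hypothetical mask, not the real constant.
    const FILE_ID_MASK: u32 = 0x00FF_FFFF;

    // Was `pub const fn file_id(self) -> FileId`; `const` has to go as soon
    // as the body calls a non-const function.
    pub fn file_id(self) -> FileId {
        FileId::from_raw(self.0 & Self::FILE_ID_MASK)
    }
}

fn main() {
    let _ = EditionedFileId(1).file_id();
}
```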
6 changes: 4 additions & 2 deletions crates/syntax-bridge/src/lib.rs
@@ -46,11 +46,13 @@ impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
/// Dummy things for testing where spans don't matter.
pub mod dummy_test_span_utils {

use std::sync::LazyLock;

use span::{Span, SyntaxContext};

use super::*;

pub const DUMMY: Span = Span {
pub static DUMMY: LazyLock<Span> = LazyLock::new(|| Span {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: span::EditionedFileId::new(
@@ -60,7 +62,7 @@ pub mod dummy_test_span_utils {
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContext::root(Edition::CURRENT),
};
});

pub struct DummyTestSpanMap;

2 changes: 1 addition & 1 deletion crates/syntax-bridge/src/tests.rs
@@ -15,7 +15,7 @@ fn check_punct_spacing(fixture: &str) {
let subtree = syntax_node_to_token_tree(
source_file.syntax(),
DummyTestSpanMap,
DUMMY,
*DUMMY,
DocCommentDesugarMode::Mbe,
);
let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
2 changes: 2 additions & 0 deletions crates/vfs/Cargo.toml
@@ -19,6 +19,8 @@ fst = "0.4.7"
indexmap.workspace = true
nohash-hasher.workspace = true
crossbeam-channel.workspace = true
salsa.workspace = true
salsa-macros.workspace = true

paths.workspace = true
stdx.workspace = true
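The only VFS-side change in these two commits is wiring up the `salsa` and `salsa-macros` dependencies; no vfs types are converted yet. Purely as a speculative sketch of the direction this enables, not code from the PR (`FileText` and its field are invented names):

```rust
// Speculative sketch, not part of this PR: with salsa available in the vfs
// crate, per-file data could eventually be modelled as a Salsa input whose
// revisions drive incremental recomputation.
#[salsa::input]
pub struct FileText {
    pub text: String,
}

// A tracked query reading the input would then take the database handle,
// along these lines (sketch only, left as a comment):
//
//     #[salsa::tracked]
//     fn line_count(db: &dyn salsa::Database, file: FileText) -> usize {
//         file.text(db).lines().count()
//     }
```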