
Commit e803dba

Refactor full highlight request
1 parent: 0cf8c7d

File tree:
  src/features/semantic_tokens.rs
  src/features/semantic_tokens/label.rs
  src/features/semantic_tokens/math_delimiter.rs
  src/server.rs

4 files changed: +62 −57 lines changed

src/features/semantic_tokens.rs
Lines changed: 46 additions & 27 deletions

```diff
@@ -6,54 +6,40 @@ use lsp_types::{
     Position, Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
     SemanticTokensLegend, Url,
 };
-use rowan::TextRange;
+use rowan::{TextLen, TextRange};
 
 use crate::{
-    db::Workspace,
+    db::{Document, Workspace},
     util::{line_index::LineIndex, line_index_ext::LineIndexExt},
     Db,
 };
 
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
 #[repr(u32)]
-pub enum TokenKind {
+enum TokenKind {
     Label = 0,
     MathDelimiter = 1,
 }
 
 bitflags! {
     #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
-    pub struct TokenModifiers: u32 {
+    struct TokenModifiers: u32 {
         const NONE = 0;
         const UNDEFINED = 1;
         const UNUSED = 2;
         const DEPRECATED = 4;
     }
 }
 
-pub fn legend() -> SemanticTokensLegend {
-    SemanticTokensLegend {
-        token_types: vec![
-            SemanticTokenType::new("label"),
-            SemanticTokenType::new("mathDelimiter"),
-        ],
-        token_modifiers: vec![
-            SemanticTokenModifier::new("undefined"),
-            SemanticTokenModifier::new("unused"),
-            SemanticTokenModifier::new("deprecated"),
-        ],
-    }
-}
-
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct Token {
-    pub range: TextRange,
-    pub kind: TokenKind,
-    pub modifiers: TokenModifiers,
+struct Token {
+    range: TextRange,
+    kind: TokenKind,
+    modifiers: TokenModifiers,
 }
 
 #[derive(Debug, Default)]
-pub struct TokenBuilder {
+struct TokenBuilder {
     tokens: Vec<Token>,
 }
 
@@ -106,12 +92,45 @@ impl TokenBuilder {
     }
 }
 
-pub fn find_all(db: &dyn Db, uri: &Url, viewport: Range) -> Option<SemanticTokens> {
+#[derive(Clone, Copy)]
+struct Context<'db> {
+    db: &'db dyn Db,
+    document: Document,
+    viewport: TextRange,
+}
+
+pub fn legend() -> SemanticTokensLegend {
+    SemanticTokensLegend {
+        token_types: vec![
+            SemanticTokenType::new("label"),
+            SemanticTokenType::new("mathDelimiter"),
+        ],
+        token_modifiers: vec![
+            SemanticTokenModifier::new("undefined"),
+            SemanticTokenModifier::new("unused"),
+            SemanticTokenModifier::new("deprecated"),
+        ],
+    }
+}
+
+pub fn find_all(db: &dyn Db, uri: &Url, viewport: Option<Range>) -> Option<SemanticTokens> {
     let workspace = Workspace::get(db);
     let document = workspace.lookup_uri(db, uri)?;
-    let viewport = document.line_index(db).offset_lsp_range(viewport);
+    let viewport = viewport.map_or_else(
+        || TextRange::new(0.into(), document.text(db).text_len()),
+        |range| document.line_index(db).offset_lsp_range(range),
+    );
+
+    let context = Context {
+        db,
+        document,
+        viewport,
+    };
+
     let mut builder = TokenBuilder::default();
-    label::find(db, document, viewport, &mut builder);
-    math_delimiter::find(db, document, viewport, &mut builder);
+
+    label::find(context, &mut builder);
+    math_delimiter::find(context, &mut builder);
+
     Some(builder.finish(document.line_index(db)))
 }
```
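
The heart of the change is `find_all`: the viewport parameter becomes `Option<Range>`, where `None` now means "highlight the whole document" and is resolved inside the feature via the new `TextLen` import; `legend()` is unchanged and merely moved below the new `Context` struct. Below is a minimal standalone sketch of that `map_or_else` fallback, with a plain byte-offset pair standing in for `rowan::TextRange` and the LSP position conversion elided; the names here are illustrative, not texlab's:

```rust
/// Stand-in for rowan::TextRange: a half-open byte range.
#[derive(Debug, Clone, Copy, PartialEq)]
struct ByteRange {
    start: u32,
    end: u32,
}

/// Sketch of the viewport resolution inside `find_all`: `None` falls back
/// to the full document length, `Some` uses the caller-supplied range.
fn resolve_viewport(text: &str, viewport: Option<ByteRange>) -> ByteRange {
    viewport.map_or_else(
        // Full highlight request: byte 0 up to the document's length,
        // mirroring TextRange::new(0.into(), document.text(db).text_len()).
        || ByteRange { start: 0, end: text.len() as u32 },
        // Range request: a real implementation converts LSP line/column
        // positions to byte offsets here (offset_lsp_range in the diff).
        |range| range,
    )
}

fn main() {
    let text = "\\section{Intro}\\label{sec:intro}";
    let full = resolve_viewport(text, None);
    assert_eq!(full, ByteRange { start: 0, end: text.len() as u32 });

    let partial = resolve_viewport(text, Some(ByteRange { start: 0, end: 15 }));
    assert_eq!(partial, ByteRange { start: 0, end: 15 });
}
```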

src/features/semantic_tokens/label.rs
Lines changed: 7 additions & 13 deletions

```diff
@@ -1,34 +1,28 @@
-use rowan::TextRange;
-
 use crate::{
     db::{analysis::label, Document, Workspace},
     Db,
 };
 
-use super::{Token, TokenBuilder, TokenKind, TokenModifiers};
+use super::{Context, Token, TokenBuilder, TokenKind, TokenModifiers};
 
-pub fn find(
-    db: &dyn Db,
-    document: Document,
-    viewport: TextRange,
-    builder: &mut TokenBuilder,
-) -> Option<()> {
-    let labels = document.parse(db).as_tex()?.analyze(db).labels(db);
+pub(super) fn find(context: Context, builder: &mut TokenBuilder) -> Option<()> {
+    let db = context.db;
+    let labels = context.document.parse(db).as_tex()?.analyze(db).labels(db);
     for label in labels
         .iter()
-        .filter(|label| viewport.intersect(label.range(db)).is_some())
+        .filter(|label| context.viewport.intersect(label.range(db)).is_some())
     {
         let name = label.name(db).text(db);
         let modifiers = match label.origin(db) {
             label::Origin::Definition(_) => {
-                if !is_label_referenced(db, document, name) {
+                if !is_label_referenced(db, context.document, name) {
                     TokenModifiers::UNUSED
                 } else {
                     TokenModifiers::NONE
                 }
             }
             label::Origin::Reference(_) | label::Origin::ReferenceRange(_) => {
-                if !is_label_defined(db, document, name) {
+                if !is_label_defined(db, context.document, name) {
                     TokenModifiers::UNDEFINED
                 } else {
                     TokenModifiers::NONE
```
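
Each token provider now receives the `Copy`-able `Context` instead of three separate arguments, and its visibility tightens to `pub(super)` since only the parent module calls it. A reduced sketch of this parameter-object pattern, with a string slice standing in for the salsa `Db`/`Document` handles (stand-in types, not texlab's):

```rust
/// Stand-in context: in the diff this bundles `&'db dyn Db`, `Document`,
/// and the resolved `TextRange` viewport.
#[derive(Clone, Copy)]
struct Context<'a> {
    document: &'a str,
    viewport: (usize, usize),
}

#[derive(Default)]
struct TokenBuilder {
    tokens: Vec<(usize, usize)>,
}

/// A provider reads everything it needs from `context`; because `Context`
/// is `Copy`, it can be handed to several providers without cloning.
fn find_labels(context: Context, builder: &mut TokenBuilder) -> Option<()> {
    let (start, end) = context.viewport;
    let visible = context.document.get(start..end)?;
    if visible.contains("\\label") {
        builder.tokens.push((start, end));
    }
    Some(())
}

fn main() {
    let context = Context {
        document: "\\label{eq:euler} $e^{i\\pi} = -1$",
        viewport: (0, 16),
    };
    let mut builder = TokenBuilder::default();
    find_labels(context, &mut builder);
    find_labels(context, &mut builder); // Context is Copy: reuse freely
    assert_eq!(builder.tokens.len(), 2);
}
```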

src/features/semantic_tokens/math_delimiter.rs
Lines changed: 6 additions & 12 deletions

```diff
@@ -1,19 +1,13 @@
-use rowan::TextRange;
+use crate::syntax::latex;
 
-use crate::{db::Document, syntax::latex, Db};
+use super::{Context, Token, TokenBuilder, TokenKind, TokenModifiers};
 
-use super::{Token, TokenBuilder, TokenKind, TokenModifiers};
-
-pub fn find(
-    db: &dyn Db,
-    document: Document,
-    viewport: TextRange,
-    builder: &mut TokenBuilder,
-) -> Option<()> {
-    let root = document.parse(db).as_tex()?.root(db);
+pub(super) fn find(context: Context, builder: &mut TokenBuilder) -> Option<()> {
+    let db = context.db;
+    let root = context.document.parse(db).as_tex()?.root(db);
 
     for token in root
-        .covering_element(viewport)
+        .covering_element(context.viewport)
         .as_node()?
         .descendants_with_tokens()
         .filter_map(|elem| elem.into_token())
```
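
The two providers restrict work to the viewport in different ways: `label::find` filters each analyzed label by range intersection, while `math_delimiter::find` walks only the subtree under the smallest syntax element covering the viewport. A small sketch of the intersection test on plain offsets, mirroring the behavior of `rowan::TextRange::intersect` (which treats merely touching ranges as intersecting, too):

```rust
/// Mirrors rowan::TextRange::intersect on (start, end) byte offsets:
/// Some(overlap) when the ranges overlap or touch, None when disjoint.
fn intersect(a: (u32, u32), b: (u32, u32)) -> Option<(u32, u32)> {
    let start = a.0.max(b.0);
    let end = a.1.min(b.1);
    (start <= end).then_some((start, end))
}

fn main() {
    let viewport = (100, 200);
    // A label inside the viewport is kept...
    assert_eq!(intersect(viewport, (150, 160)), Some((150, 160)));
    // ...one straddling the edge is clipped but kept (partial overlap)...
    assert_eq!(intersect(viewport, (190, 250)), Some((190, 200)));
    // ...and an off-screen label is skipped, as in the `.filter(...)` call.
    assert_eq!(intersect(viewport, (300, 400)), None);
}
```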

src/server.rs
Lines changed: 3 additions & 5 deletions

```diff
@@ -13,7 +13,7 @@ use log::{error, info};
 use lsp_server::{Connection, ErrorCode, Message, RequestId};
 use lsp_types::{notification::*, request::*, *};
 use once_cell::sync::Lazy;
-use rowan::{ast::AstNode, TextLen, TextRange, TextSize};
+use rowan::{ast::AstNode, TextSize};
 use rustc_hash::FxHashSet;
 use serde::{Deserialize, Serialize};
 use serde_repr::{Deserialize_repr, Serialize_repr};
@@ -816,9 +816,7 @@ impl Server {
 
     fn semantic_tokens_full(&mut self, id: RequestId, params: SemanticTokensParams) -> Result<()> {
         self.run_with_db(id, move |db| {
-            let Some(document) = Workspace::get(db).lookup_uri(db, &params.text_document.uri) else { return None };
-            let range = document.line_index(db).line_col_lsp_range(TextRange::new(0.into(), document.text(db).text_len()));
-            semantic_tokens::find_all(db, &params.text_document.uri, range)
+            semantic_tokens::find_all(db, &params.text_document.uri, None)
         });
 
         Ok(())
@@ -830,7 +828,7 @@
         params: SemanticTokensRangeParams,
     ) -> Result<()> {
         self.run_with_db(id, move |db| {
-            semantic_tokens::find_all(db, &params.text_document.uri, params.range)
+            semantic_tokens::find_all(db, &params.text_document.uri, Some(params.range))
         });
 
         Ok(())
```
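
With the fallback centralized in `find_all`, both handlers reduce to a single call: the full request passes `None` (so the server no longer builds a synthetic whole-document range, and the `TextLen`/`TextRange` imports go away), while the range request wraps the client's range in `Some`. A hedged sketch of the resulting dispatch shape, with stand-in types and the `run_with_db` plumbing omitted:

```rust
/// Stand-ins for lsp_types::Range and SemanticTokens.
struct Range;
struct Tokens;

/// Sketch of the refactored entry point: Option<Range> selects between a
/// full-document and a range-restricted highlight request.
fn find_all(_uri: &str, _viewport: Option<Range>) -> Option<Tokens> {
    Some(Tokens) // token collection elided
}

/// textDocument/semanticTokens/full: no range computation in the server.
fn semantic_tokens_full(uri: &str) -> Option<Tokens> {
    find_all(uri, None)
}

/// textDocument/semanticTokens/range: forward the client-supplied range.
fn semantic_tokens_range(uri: &str, range: Range) -> Option<Tokens> {
    find_all(uri, Some(range))
}

fn main() {
    assert!(semantic_tokens_full("file:///main.tex").is_some());
    assert!(semantic_tokens_range("file:///main.tex", Range).is_some());
}
```

Keeping the `None`/`Some` distinction inside `find_all` means the two LSP endpoints can no longer drift apart in how the full-document range is computed.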
