Skip to content

Allow semantic tokens for strings to be disabled #8795

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
May 17, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions crates/rust-analyzer/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,13 @@ config_data! {
/// Advanced option, fully override the command rust-analyzer uses for
/// formatting.
rustfmt_overrideCommand: Option<Vec<String>> = "null",

/// Use semantic tokens for strings.
///
/// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
/// By disabling semantic tokens for strings, other grammars can be used to highlight
/// their contents.
semanticStringTokens: bool = "true",
}
}

Expand Down Expand Up @@ -381,6 +388,9 @@ impl Config {
pub fn line_folding_only(&self) -> bool {
try_or!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?, false)
}
/// Whether semantic tokens should be emitted for string literals
/// (`rust-analyzer.semanticStringTokens`, default `true`).
///
/// Disabling lets editors whose semantic tokens override textmate-style
/// grammars (e.g. VS Code) keep grammar-based highlighting inside strings.
pub fn semantic_strings(&self) -> bool {
self.data.semanticStringTokens
}
pub fn hierarchical_symbols(&self) -> bool {
try_or!(
self.caps
Expand Down
13 changes: 9 additions & 4 deletions crates/rust-analyzer/src/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1376,7 +1376,9 @@ pub(crate) fn handle_semantic_tokens_full(
let line_index = snap.file_line_index(file_id)?;

let highlights = snap.analysis.highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_strings = snap.config.semantic_strings();
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);

// Unconditionally cache the tokens
snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
Expand All @@ -1395,8 +1397,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let line_index = snap.file_line_index(file_id)?;

let highlights = snap.analysis.highlight(file_id)?;

let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_strings = snap.config.semantic_strings();
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);

let mut cache = snap.semantic_tokens_cache.lock();
let cached_tokens = cache.entry(params.text_document.uri).or_default();
Expand Down Expand Up @@ -1425,7 +1428,9 @@ pub(crate) fn handle_semantic_tokens_range(
let line_index = snap.file_line_index(frange.file_id)?;

let highlights = snap.analysis.highlight_range(frange)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_strings = snap.config.semantic_strings();
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
Ok(Some(semantic_tokens.into()))
}

Expand Down
8 changes: 6 additions & 2 deletions crates/rust-analyzer/src/to_proto.rs
Original file line number Diff line number Diff line change
Expand Up @@ -381,6 +381,7 @@ pub(crate) fn semantic_tokens(
text: &str,
line_index: &LineIndex,
highlights: Vec<HlRange>,
include_strings: bool,
) -> lsp_types::SemanticTokens {
let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
Expand All @@ -389,8 +390,11 @@ pub(crate) fn semantic_tokens(
if highlight_range.highlight.is_empty() {
continue;
}
let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
let token_index = semantic_tokens::type_index(type_);
let (typ, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
if !include_strings && typ == lsp_types::SemanticTokenType::STRING {
continue;
}
let token_index = semantic_tokens::type_index(typ);
let modifier_bitset = mods.0;

for mut text_range in line_index.index.lines(highlight_range.range) {
Expand Down
43 changes: 39 additions & 4 deletions crates/rust-analyzer/tests/rust-analyzer/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,15 +18,16 @@ use lsp_types::{
notification::DidOpenTextDocument,
request::{
CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
WillRenameFiles,
SemanticTokensRangeRequest, WillRenameFiles,
},
CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
TextDocumentPositionParams, WorkDoneProgressParams,
PartialResultParams, Position, Range, RenameFilesParams, SemanticTokens,
SemanticTokensRangeParams, TextDocumentItem, TextDocumentPositionParams,
WorkDoneProgressParams,
};
use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
use serde_json::json;
use serde_json::{from_value, json};
use test_utils::skip_slow_tests;

use crate::{
Expand All @@ -37,6 +38,40 @@ use crate::{
const PROFILE: &str = "";
// const PROFILE: &'static str = "*@3>100";

#[test]
fn can_disable_semantic_strings() {
    // Verifies that `semanticStringTokens` gates semantic tokens for string
    // literals: when enabled the `"hi"` literal yields exactly one token,
    // when disabled it yields none.
    if skip_slow_tests() {
        return;
    }

    // Plain `for` over `iter().for_each` — avoids the closure and the
    // `*semantic_strings` deref noise.
    for semantic_strings in [true, false] {
        let server = Project::with_fixture(
            r#"
//- /Cargo.toml
[package]
name = "foo"
version = "0.0.0"

//- /src/lib.rs
const foo: &'static str = "hi";
"#,
        )
        // `json!` is already imported at the top of the file; no need to
        // fully qualify it.
        .with_config(json!({ "semanticStringTokens": semantic_strings }))
        .server()
        .wait_until_workspace_is_loaded();

        // Request tokens for the range covering the `"hi"` string literal.
        let res = server.send_request::<SemanticTokensRangeRequest>(SemanticTokensRangeParams {
            text_document: server.doc_id("src/lib.rs"),
            partial_result_params: PartialResultParams::default(),
            work_done_progress_params: WorkDoneProgressParams::default(),
            range: Range::new(Position::new(0, 26), Position::new(0, 30)),
        });

        let tok_res: SemanticTokens = from_value(res).expect("invalid server response");
        // `assert_eq!` over `assert!(a == b)`: prints both sides on failure.
        // true as usize == 1, false as usize == 0.
        assert_eq!(tok_res.data.len(), semantic_strings as usize);
    }
}

#[test]
fn completes_items_from_standard_library() {
if skip_slow_tests() {
Expand Down
9 changes: 9 additions & 0 deletions docs/user/generated_config.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -332,3 +332,12 @@ Additional arguments to `rustfmt`.
Advanced option, fully override the command rust-analyzer uses for
formatting.
--
[[rust-analyzer.semanticStringTokens]]rust-analyzer.semanticStringTokens (default: `true`)::
+
--
Use semantic tokens for strings.

In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
By disabling semantic tokens for strings, other grammars can be used to highlight
their contents.
--
5 changes: 5 additions & 0 deletions editors/code/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -770,6 +770,11 @@
"type": "string"
}
},
"rust-analyzer.semanticStringTokens": {
"markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
"default": true,
"type": "boolean"
},
"$generated-end": false
}
},
Expand Down