
Commit ae0aeb1

Authored by bors[bot] and kjeremy

Merge #3307

3307: Semantic Ranges r=matklad a=kjeremy

Co-authored-by: Jeremy Kolb <[email protected]>
Co-authored-by: kjeremy <[email protected]>

2 parents (d3040c0 + fa355d6), commit ae0aeb1

File tree (6 files changed, +131 -41 lines):

  crates/ra_ide/src/lib.rs
  crates/ra_ide/src/syntax_highlighting.rs
  crates/rust-analyzer/src/caps.rs
  crates/rust-analyzer/src/main_loop.rs
  crates/rust-analyzer/src/main_loop/handlers.rs
  crates/rust-analyzer/src/req.rs


crates/ra_ide/src/lib.rs

Lines changed: 7 additions & 2 deletions
@@ -425,9 +425,14 @@ impl Analysis {
         self.with_db(|db| runnables::runnables(db, file_id))
     }
 
-    /// Computes syntax highlighting for the given file.
+    /// Computes syntax highlighting for the given file
     pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
-        self.with_db(|db| syntax_highlighting::highlight(db, file_id))
+        self.with_db(|db| syntax_highlighting::highlight(db, file_id, None))
+    }
+
+    /// Computes syntax highlighting for the given file range.
+    pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> {
+        self.with_db(|db| syntax_highlighting::highlight(db, frange.file_id, Some(frange.range)))
     }
 
     /// Computes syntax highlighting for the given file.
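To make the new surface concrete: `highlight` keeps its old behaviour by passing `None` down, while `highlight_range` forwards `Some(frange.range)`. A minimal sketch of how the two entry points could be exercised from the crate's own test code (the test name and sample text are illustrative; only the API names come from this diff):

    use crate::mock_analysis::single_file;
    use crate::{FileRange, TextRange};

    #[test]
    fn highlight_range_smoke_test() {
        let (analysis, file_id) = single_file("fn main() { let x = 92; }");

        // Whole-file highlighting, same behaviour as before the change:
        let whole_file = analysis.highlight(file_id).unwrap();

        // New: ask only for the first ten characters of the file.
        let first_ten = analysis
            .highlight_range(FileRange {
                file_id,
                range: TextRange::offset_len(0.into(), 10.into()),
            })
            .unwrap();

        // The range query only filters nodes out, so it returns at most the full set.
        assert!(first_ten.len() <= whole_file.len());
    }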

crates/ra_ide/src/syntax_highlighting.rs

Lines changed: 85 additions & 26 deletions
@@ -5,8 +5,8 @@ use ra_db::SourceDatabase;
 use ra_ide_db::{defs::NameDefinition, RootDatabase};
 use ra_prof::profile;
 use ra_syntax::{
-    ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange,
-    WalkEvent, T,
+    ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken,
+    TextRange, WalkEvent, T,
 };
 use rustc_hash::FxHashMap;
 
@@ -67,8 +67,13 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
     }
 }
 
-pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
+pub(crate) fn highlight(
+    db: &RootDatabase,
+    file_id: FileId,
+    range: Option<TextRange>,
+) -> Vec<HighlightedRange> {
     let _p = profile("highlight");
+
     let parse = db.parse(file_id);
     let root = parse.tree().syntax().clone();
 
@@ -79,22 +84,56 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
 
     let mut in_macro_call = None;
 
+    // Determine the root based on the given range.
+    let (root, highlight_range) = if let Some(range) = range {
+        let root = match root.covering_element(range) {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => token.parent(),
+        };
+        (root, range)
+    } else {
+        (root.clone(), root.text_range())
+    };
+
     for event in root.preorder_with_tokens() {
         match event {
-            WalkEvent::Enter(node) => match node.kind() {
-                MACRO_CALL => {
-                    in_macro_call = Some(node.clone());
-                    if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
-                        res.push(HighlightedRange { range, tag: tags::MACRO, binding_hash: None });
-                    }
+            WalkEvent::Enter(node) => {
+                if node.text_range().intersection(&highlight_range).is_none() {
+                    continue;
                 }
-                _ if in_macro_call.is_some() => {
-                    if let Some(token) = node.as_token() {
-                        if let Some((tag, binding_hash)) = highlight_token_tree(
+
+                match node.kind() {
+                    MACRO_CALL => {
+                        in_macro_call = Some(node.clone());
+                        if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
+                            res.push(HighlightedRange {
+                                range,
+                                tag: tags::MACRO,
+                                binding_hash: None,
+                            });
+                        }
+                    }
+                    _ if in_macro_call.is_some() => {
+                        if let Some(token) = node.as_token() {
+                            if let Some((tag, binding_hash)) = highlight_token_tree(
+                                &mut sb,
+                                &analyzer,
+                                &mut bindings_shadow_count,
+                                InFile::new(file_id.into(), token.clone()),
+                            ) {
+                                res.push(HighlightedRange {
+                                    range: node.text_range(),
+                                    tag,
+                                    binding_hash,
+                                });
+                            }
+                        }
+                    }
+                    _ => {
+                        if let Some((tag, binding_hash)) = highlight_node(
                             &mut sb,
-                            &analyzer,
                             &mut bindings_shadow_count,
-                            InFile::new(file_id.into(), token.clone()),
+                            InFile::new(file_id.into(), node.clone()),
                         ) {
                             res.push(HighlightedRange {
                                 range: node.text_range(),
@@ -104,17 +143,12 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
                         }
                     }
                 }
-                _ => {
-                    if let Some((tag, binding_hash)) = highlight_node(
-                        &mut sb,
-                        &mut bindings_shadow_count,
-                        InFile::new(file_id.into(), node.clone()),
-                    ) {
-                        res.push(HighlightedRange { range: node.text_range(), tag, binding_hash });
-                    }
-                }
-            },
+            }
             WalkEvent::Leave(node) => {
+                if node.text_range().intersection(&highlight_range).is_none() {
+                    continue;
+                }
+
                 if let Some(m) = in_macro_call.as_ref() {
                     if *m == node {
                         in_macro_call = None;
@@ -265,7 +299,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
         )
     }
 
-    let mut ranges = highlight(db, file_id);
+    let mut ranges = highlight(db, file_id, None);
     ranges.sort_by_key(|it| it.range.start());
     // quick non-optimal heuristic to intersect token ranges and highlighted ranges
     let mut frontier = 0;
@@ -374,7 +408,10 @@ mod tests {
 
     use test_utils::{assert_eq_text, project_dir, read_text};
 
-    use crate::mock_analysis::{single_file, MockAnalysis};
+    use crate::{
+        mock_analysis::{single_file, MockAnalysis},
+        FileRange, TextRange,
+    };
 
     #[test]
     fn test_highlighting() {
@@ -475,4 +512,26 @@ fn bar() {
         let _ = host.analysis().highlight(file_id).unwrap();
         // eprintln!("elapsed: {:?}", t.elapsed());
     }
+
+    #[test]
+    fn test_ranges() {
+        let (analysis, file_id) = single_file(
+            r#"
+            #[derive(Clone, Debug)]
+            struct Foo {
+                pub x: i32,
+                pub y: i32,
+            }"#,
+        );
+
+        // The "x"
+        let highlights = &analysis
+            .highlight_range(FileRange {
+                file_id,
+                range: TextRange::offset_len(82.into(), 1.into()),
+            })
+            .unwrap();
+
+        assert_eq!(highlights[0].tag, "field");
+    }
 }
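The core of the syntax_highlighting change is the narrowing step: take the smallest node covering the requested range, then skip every node whose own range does not intersect it. A condensed, self-contained sketch of just that idea against the ra_syntax API used in the patch (the free function `nodes_in_range` is illustrative, not part of the commit):

    use ra_syntax::{NodeOrToken, SyntaxNode, TextRange, WalkEvent};

    fn nodes_in_range(file_root: &SyntaxNode, range: TextRange) -> Vec<SyntaxNode> {
        // Narrow the walk to the smallest node that covers `range`.
        let root = match file_root.covering_element(range) {
            NodeOrToken::Node(node) => node,
            NodeOrToken::Token(token) => token.parent(),
        };

        let mut res = Vec::new();
        for event in root.preorder_with_tokens() {
            if let WalkEvent::Enter(element) = event {
                // Anything lying entirely outside the requested range is skipped,
                // mirroring the `intersection(..).is_none()` checks in the patch.
                if element.text_range().intersection(&range).is_none() {
                    continue;
                }
                if let NodeOrToken::Node(node) = element {
                    res.push(node);
                }
            }
        }
        res
    }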

crates/rust-analyzer/src/caps.rs

Lines changed: 9 additions & 7 deletions
@@ -7,9 +7,9 @@ use lsp_types::{
     CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
     ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
     SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
-    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
-    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
-    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
+    TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
+    WorkDoneProgressOptions,
 };
 
 pub fn server_capabilities() -> ServerCapabilities {
@@ -60,7 +60,7 @@ pub fn server_capabilities() -> ServerCapabilities {
         execute_command_provider: None,
         workspace: None,
         call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
-        semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
+        semantic_tokens_provider: Some(
             SemanticTokensOptions {
                 legend: SemanticTokensLegend {
                     token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
@@ -71,9 +71,11 @@ pub fn server_capabilities() -> ServerCapabilities {
                 },
 
                 document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
-                ..SemanticTokensOptions::default()
-            },
-        )),
+                range_provider: Some(true),
+                work_done_progress_options: Default::default(),
+            }
+            .into(),
+        ),
         experimental: Default::default(),
     }
 }
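Net effect on capabilities: the server keeps advertising full-document semantic tokens and now also advertises range requests via `range_provider: Some(true)`, with the explicit `SemanticTokensServerCapabilities::SemanticTokensOptions(..)` wrapper replaced by an `.into()` conversion. A small sketch of how that could be asserted in a test (the test name is illustrative, and it assumes the test sits in caps.rs next to `server_capabilities`):

    use lsp_types::SemanticTokensServerCapabilities;

    #[test]
    fn semantic_tokens_range_is_advertised() {
        let caps = server_capabilities();
        match caps.semantic_tokens_provider {
            Some(SemanticTokensServerCapabilities::SemanticTokensOptions(opts)) => {
                // The field set to Some(true) in this commit.
                assert_eq!(opts.range_provider, Some(true));
            }
            _ => panic!("semantic tokens should be advertised as SemanticTokensOptions"),
        }
    }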

crates/rust-analyzer/src/main_loop.rs

Lines changed: 2 additions & 1 deletion
@@ -527,8 +527,9 @@ fn on_request(
         .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
         .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
         .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
-        .on::<req::Ssr>(handlers::handle_ssr)?
         .on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+        .on::<req::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)?
+        .on::<req::Ssr>(handlers::handle_ssr)?
         .finish();
     Ok(())
 }

crates/rust-analyzer/src/main_loop/handlers.rs

Lines changed: 24 additions & 2 deletions
@@ -17,8 +17,8 @@ use lsp_types::{
     Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
     FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
     PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
-    SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
-    TextDocumentIdentifier, TextEdit, WorkspaceEdit,
+    SemanticTokens, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
+    SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit,
 };
 use ra_ide::{
     AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind,
@@ -1092,3 +1092,25 @@ pub fn handle_semantic_tokens(
 
     Ok(Some(tokens.into()))
 }
+
+pub fn handle_semantic_tokens_range(
+    world: WorldSnapshot,
+    params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+    let _p = profile("handle_semantic_tokens_range");
+
+    let frange = (&params.text_document, params.range).try_conv_with(&world)?;
+    let line_index = world.analysis().file_line_index(frange.file_id)?;
+
+    let mut builder = SemanticTokensBuilder::default();
+
+    for h in world.analysis().highlight_range(frange)?.into_iter() {
+        let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
+        let (token_type, token_modifiers) = type_and_modifiers.conv();
+        builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
+    }
+
+    let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
+
+    Ok(Some(tokens.into()))
+}
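For context on what `SemanticTokensBuilder` ultimately produces: the LSP semantic tokens payload is a flat integer array with five values per token (delta line, delta start character, length, token type index, modifier bitset), where positions are encoded relative to the previous token. A toy, standalone sketch of that delta encoding, assuming tokens are sorted by position (this is not the builder used in the handler; the names are illustrative):

    /// One resolved token: absolute position plus type/modifier indices.
    struct Token {
        line: u32,
        start_char: u32,
        length: u32,
        token_type: u32,
        modifiers: u32,
    }

    /// Flatten tokens into the LSP wire format: five u32s per token,
    /// with line/start deltas relative to the previous token.
    fn encode(tokens: &[Token]) -> Vec<u32> {
        let mut data = Vec::with_capacity(tokens.len() * 5);
        let (mut prev_line, mut prev_start) = (0u32, 0u32);
        for t in tokens {
            let delta_line = t.line - prev_line;
            // The start offset is relative only while staying on the same line.
            let delta_start =
                if delta_line == 0 { t.start_char - prev_start } else { t.start_char };
            data.extend_from_slice(&[delta_line, delta_start, t.length, t.token_type, t.modifiers]);
            prev_line = t.line;
            prev_start = t.start_char;
        }
        data
    }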

crates/rust-analyzer/src/req.rs

Lines changed: 4 additions & 3 deletions
@@ -12,9 +12,10 @@ pub use lsp_types::{
     DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
     PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
     PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
-    SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
-    ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
-    TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
+    SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
+    SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit,
+    WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
 };
 
 pub enum AnalyzerStatus {}