|
| 1 | +//! This module provides `StaticIndex` which is used for powering |
| 2 | +//! read-only code browsers and emitting LSIF |
| 3 | +
|
| 4 | +use std::collections::HashMap; |
| 5 | + |
| 6 | +use hir::Semantics; |
| 7 | +use hir::{db::HirDatabase, Crate, Module}; |
| 8 | +use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt}; |
| 9 | +use ide_db::defs::Definition; |
| 10 | +use ide_db::RootDatabase; |
| 11 | +use rustc_hash::FxHashSet; |
| 12 | +use syntax::{AstNode, SyntaxKind::*, T}; |
| 13 | +use syntax::{SyntaxToken, TextRange}; |
| 14 | + |
| 15 | +use crate::display::TryToNav; |
| 16 | +use crate::hover::hover_for_definition; |
| 17 | +use crate::{Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult}; |
| 18 | + |
/// A static representation of fully analyzed source code.
///
/// The intended use-case is powering read-only code browsers and emitting LSIF
pub struct StaticIndex<'a> {
    /// All indexed files, each with its folding ranges and per-token ids.
    pub files: Vec<StaticIndexedFile>,
    /// Static data (hover, definition, references) for each token, addressed by `TokenId`.
    pub tokens: TokenStore,
    // Borrowed analysis handle, used to compute folding ranges.
    analysis: &'a Analysis,
    // Borrowed database, used for semantics, navigation and source-root queries.
    db: &'a RootDatabase,
    // Maps a resolved definition to the token id allocated for it, so every
    // reference to the same definition shares a single `TokenStaticData` entry.
    def_map: HashMap<Definition, TokenId>,
}
| 29 | + |
/// A single occurrence of a token that resolves to some definition.
pub struct ReferenceData {
    /// File and text range of the occurrence.
    pub range: FileRange,
    /// True when this occurrence is the definition site itself.
    pub is_definition: bool,
}
| 34 | + |
/// Precomputed, per-definition data shared by all tokens that resolve to it.
pub struct TokenStaticData {
    /// Hover contents for the definition, if any could be computed.
    pub hover: Option<HoverResult>,
    /// Location of the definition (focus range preferred over full range).
    pub definition: Option<FileRange>,
    /// Every recorded occurrence of the definition, including the definition site.
    pub references: Vec<ReferenceData>,
}
| 40 | + |
/// Index of a `TokenStaticData` entry inside a `TokenStore`.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(usize);
| 43 | + |
/// Append-only arena of `TokenStaticData`, indexed by `TokenId`.
#[derive(Default)]
pub struct TokenStore(Vec<TokenStaticData>);
| 46 | + |
| 47 | +impl TokenStore { |
| 48 | + pub fn insert(&mut self, data: TokenStaticData) -> TokenId { |
| 49 | + let id = TokenId(self.0.len()); |
| 50 | + self.0.push(data); |
| 51 | + id |
| 52 | + } |
| 53 | + |
| 54 | + pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> { |
| 55 | + self.0.get_mut(id.0) |
| 56 | + } |
| 57 | + |
| 58 | + pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> { |
| 59 | + self.0.get(id.0) |
| 60 | + } |
| 61 | + |
| 62 | + pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> { |
| 63 | + self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x)) |
| 64 | + } |
| 65 | +} |
| 66 | + |
/// Per-file results of static indexing.
pub struct StaticIndexedFile {
    /// The file these results belong to.
    pub file_id: FileId,
    /// Folding ranges computed for the file.
    pub folds: Vec<Fold>,
    /// Each indexed token's range paired with the id of its shared static data.
    pub tokens: Vec<(TextRange, TokenId)>,
}
| 72 | + |
| 73 | +fn all_modules(db: &dyn HirDatabase) -> Vec<Module> { |
| 74 | + let mut worklist: Vec<_> = |
| 75 | + Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect(); |
| 76 | + let mut modules = Vec::new(); |
| 77 | + |
| 78 | + while let Some(module) = worklist.pop() { |
| 79 | + modules.push(module); |
| 80 | + worklist.extend(module.children(db)); |
| 81 | + } |
| 82 | + |
| 83 | + modules |
| 84 | +} |
| 85 | + |
impl StaticIndex<'_> {
    /// Indexes a single file: computes its folding ranges and, for every
    /// identifier-like token, the hover/definition/reference data.
    fn add_file(&mut self, file_id: FileId) {
        let folds = self.analysis.folding_ranges(file_id).unwrap();
        // hovers
        let sema = hir::Semantics::new(self.db);
        let tokens_or_nodes = sema.parse(file_id).syntax().clone();
        // Keep only the leaf tokens of the parse tree; interior nodes are dropped.
        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
            syntax::NodeOrToken::Node(_) => None,
            syntax::NodeOrToken::Token(x) => Some(x),
        });
        let hover_config =
            HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
        // Restrict to token kinds that can refer to a definition.
        let tokens = tokens.filter(|token| match token.kind() {
            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
            _ => false,
        });
        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
        for token in tokens {
            let range = token.text_range();
            let node = token.parent().unwrap();
            // Skip tokens that don't resolve to exactly one definition.
            let def = if let Some(x) = get_definition(&sema, token.clone()) {
                x
            } else {
                continue;
            };
            // Reuse the id if this definition was seen before; otherwise
            // compute hover and definition location once and cache the id.
            let id = if let Some(x) = self.def_map.get(&def) {
                *x
            } else {
                let x = self.tokens.insert(TokenStaticData {
                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
                    definition: def
                        .try_to_nav(self.db)
                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
                    references: vec![],
                });
                self.def_map.insert(def, x);
                x
            };
            let token = self.tokens.get_mut(id).unwrap();
            // Record this occurrence as a reference; it counts as the
            // definition site when its range matches the nav target's range.
            token.references.push(ReferenceData {
                range: FileRange { range, file_id },
                is_definition: if let Some(x) = def.try_to_nav(self.db) {
                    x.file_id == file_id && x.focus_or_full_range() == range
                } else {
                    false
                },
            });
            result.tokens.push((range, id));
        }
        self.files.push(result);
    }

    /// Builds the full static index over every non-library (workspace) file.
    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> StaticIndex<'a> {
        // Only consider modules whose defining file is outside library source roots.
        let work = all_modules(db).into_iter().filter(|module| {
            let file_id = module.definition_source(db).file_id.original_file(db);
            let source_root = db.file_source_root(file_id);
            let source_root = db.source_root(source_root);
            !source_root.is_library
        });
        let mut this = StaticIndex {
            files: vec![],
            tokens: Default::default(),
            analysis,
            db,
            def_map: Default::default(),
        };
        let mut visited_files = FxHashSet::default();
        for module in work {
            let file_id = module.definition_source(db).file_id.original_file(db);
            // Multiple modules can live in one file; index each file only once.
            if visited_files.contains(&file_id) {
                continue;
            }
            this.add_file(file_id);
            // mark the file
            visited_files.insert(file_id);
        }
        this
    }
}
| 165 | + |
| 166 | +fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> { |
| 167 | + for token in sema.descend_into_macros_many(token) { |
| 168 | + let def = Definition::from_token(&sema, &token); |
| 169 | + if let [x] = def.as_slice() { |
| 170 | + return Some(*x); |
| 171 | + } else { |
| 172 | + continue; |
| 173 | + }; |
| 174 | + } |
| 175 | + None |
| 176 | +} |
| 177 | + |
#[cfg(test)]
mod tests {
    use crate::{fixture, StaticIndex};
    use ide_db::base_db::FileRange;
    use std::collections::HashSet;

    // Asserts that the indexed token ranges are exactly the ranges annotated
    // (with `^`) in the fixture: no extras, none missing.
    fn check_all_ranges(ra_fixture: &str) {
        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
        let s = StaticIndex::compute(&*analysis.db, &analysis);
        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
        for f in s.files {
            for (range, _) in f.tokens {
                let x = FileRange { file_id: f.file_id, range };
                if !range_set.contains(&x) {
                    panic!("additional range {:?}", x);
                }
                range_set.remove(&x);
            }
        }
        if !range_set.is_empty() {
            panic!("unfound ranges {:?}", range_set);
        }
    }

    // Asserts that the recorded definition locations are exactly the ranges
    // annotated in the fixture.
    fn check_definitions(ra_fixture: &str) {
        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
        let s = StaticIndex::compute(&*analysis.db, &analysis);
        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
        for (_, t) in s.tokens.iter() {
            if let Some(x) = t.definition {
                if !range_set.contains(&x) {
                    panic!("additional definition {:?}", x);
                }
                range_set.remove(&x);
            }
        }
        if !range_set.is_empty() {
            panic!("unfound definitions {:?}", range_set);
        }
    }

    #[test]
    fn struct_and_enum() {
        check_all_ranges(
            r#"
struct Foo;
     //^^^
enum E { X(Foo) }
   //^   ^ ^^^
"#,
        );
        check_definitions(
            r#"
struct Foo;
     //^^^
enum E { X(Foo) }
   //^   ^
"#,
        );
    }

    #[test]
    fn derives() {
        check_all_ranges(
            r#"
#[rustc_builtin_macro]
pub macro Copy {}
        //^^^^
#[rustc_builtin_macro]
pub macro derive {}
        //^^^^^^
#[derive(Copy)]
//^^^^^^ ^^^^
struct Hello(i32);
     //^^^^^ ^^^
"#,
        );
    }
}
0 commit comments