@@ -294,9 +294,8 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
-        let file_id = sa.expand(self.db, macro_call)?;
+        let sa = self.analyze(macro_call.syntax());
+        let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.db.parse_or_expand(file_id)?;
         self.cache(node.clone(), file_id);
         Some(node)
@@ -308,9 +307,8 @@ impl<'db> SemanticsImpl<'db> {
         hypothetical_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        let macro_call =
-            self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let sa = self.analyze(actual_macro_call.syntax());
+        let macro_call = InFile::new(sa.file_id, actual_macro_call);
         let krate = sa.resolver.krate()?;
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
@@ -326,10 +324,9 @@ impl<'db> SemanticsImpl<'db> {
     fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         let _p = profile::span("descend_into_macros");
         let parent = token.parent();
-        let parent = self.find_file(parent);
-        let sa = self.analyze2(parent.as_ref(), None);
+        let sa = self.analyze(&parent);
 
-        let token = successors(Some(parent.with_value(token)), |token| {
+        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
             self.db.check_canceled();
             let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
             let tt = macro_call.token_tree()?;
@@ -486,15 +483,13 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), None).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze(node);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -504,21 +499,24 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        let src = self.find_file(node.clone());
-        self.analyze2(src.as_ref(), None)
+        self.analyze_impl(node, None)
     }
+    fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+        self.analyze_impl(node, Some(offset))
+    }
+    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+        let _p = profile::span("Semantics::analyze_impl");
+        let node = self.find_file(node.clone());
+        let node = node.as_ref();
 
-    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
-        let _p = profile::span("Semantics::analyze2");
-
-        let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
+        let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
             Some(it) => it,
-            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), node),
         };
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
@@ -528,7 +526,7 @@ impl<'db> SemanticsImpl<'db> {
             ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
             ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
-        SourceAnalyzer::new_for_resolver(resolver, src)
+        SourceAnalyzer::new_for_resolver(resolver, node)
     }
 
     fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
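The diff collapses the old `find_file(...)` plus `analyze2(InFile<&SyntaxNode>, Option<TextSize>)` two-step into public `analyze` / `analyze_with_offset` entry points over a single private `analyze_impl`: callers now hand over a bare `&SyntaxNode`, `analyze_impl` resolves the file itself, and callers read `file_id` and `resolver` off the returned `SourceAnalyzer` instead of threading an `InFile` through by hand. Below is a minimal sketch of that wrapper shape, using toy stand-in types rather than the real hir/rust-analyzer API.

```rust
// Toy stand-ins for FileId, Resolver and SourceAnalyzer; the real types live in hir.
#[derive(Clone, Copy, Debug)]
struct FileId(u32);

#[derive(Default, Debug)]
struct Resolver;

#[derive(Debug)]
struct SourceAnalyzer {
    file_id: FileId,
    resolver: Resolver,
}

struct Semantics;

impl Semantics {
    // Entry point without a position hint.
    fn analyze(&self, node: &str) -> SourceAnalyzer {
        self.analyze_impl(node, None)
    }

    // Entry point when a cursor offset is known.
    fn analyze_with_offset(&self, node: &str, offset: usize) -> SourceAnalyzer {
        self.analyze_impl(node, Some(offset))
    }

    // Single private implementation: looks up the file once and builds the analyzer,
    // so callers never call find_file or construct the file wrapper themselves.
    fn analyze_impl(&self, node: &str, _offset: Option<usize>) -> SourceAnalyzer {
        let file_id = self.find_file(node);
        SourceAnalyzer { file_id, resolver: Resolver::default() }
    }

    fn find_file(&self, _node: &str) -> FileId {
        FileId(0)
    }
}

fn main() {
    let sema = Semantics;
    // Callers pass the node and read file_id/resolver off the result.
    let sa = sema.analyze("fn foo() {}");
    let sa_at = sema.analyze_with_offset("fn foo() {}", 3);
    println!("{:?} {:?} {:?}", sa.file_id, sa.resolver, sa_at.file_id);
}
```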