@@ -11,6 +11,8 @@ pub(crate) mod types;
 pub(crate) mod utils;
 
 use rustc_ast as ast;
+use rustc_ast::token::{Token, TokenKind};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_attr as attr;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry};
 use rustc_hir as hir;
@@ -2081,8 +2083,8 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> {
     fn visit_item(&mut self, item: &'hir hir::Item<'hir>) {
         if self.item.is_none()
             && item.ident == self.looking_for
-            && matches!(item.kind, hir::ItemKind::Use(_, _))
-            || item.owner_id.def_id == self.target_def_id
+            && (matches!(item.kind, hir::ItemKind::Use(_, _))
+                || item.owner_id.def_id == self.target_def_id)
         {
             self.item = Some(item);
         }
@@ -2103,33 +2105,74 @@ fn get_all_import_attributes<'hir>(
     let mut visitor = OneLevelVisitor::new(hir_map, target_def_id);
     let mut visited = FxHashSet::default();
     // If the item is an import and has at least a path with two parts, we go into it.
-    while let hir::ItemKind::Use(path, _) = item.kind &&
-        path.segments.len() > 1 &&
-        let hir::def::Res::Def(_, def_id) = path.segments[path.segments.len() - 2].res &&
-        visited.insert(def_id)
-    {
-        if let Some(hir::Node::Item(parent_item)) = hir_map.get_if_local(def_id) {
-            // We add the attributes from this import into the list.
-            attributes.extend_from_slice(hir_map.attrs(item.hir_id()));
-            // We get the `Ident` we will be looking for into `item`.
-            let looking_for = path.segments[path.segments.len() - 1].ident;
-            visitor.reset(looking_for);
-            hir::intravisit::walk_item(&mut visitor, parent_item);
-            if let Some(i) = visitor.item {
-                item = i;
-            } else {
-                break;
+    while let hir::ItemKind::Use(path, _) = item.kind && visited.insert(item.hir_id()) {
+        // We add the attributes from this import into the list.
+        add_without_unwanted_attributes(attributes, hir_map.attrs(item.hir_id()));
+
+        let def_id = if path.segments.len() > 1 {
+            match path.segments[path.segments.len() - 2].res {
+                hir::def::Res::Def(_, def_id) => def_id,
+                _ => break,
+            }
+        } else {
+            // If the path doesn't have a parent, then the parent is the current module.
+            tcx.parent(item.owner_id.def_id.to_def_id())
+        };
+
+        let Some(parent) = hir_map.get_if_local(def_id) else { break };
+
+        // We get the `Ident` we will be looking for into `item`.
+        let looking_for = path.segments[path.segments.len() - 1].ident;
+        visitor.reset(looking_for);
+
+        match parent {
+            hir::Node::Item(parent_item) => {
+                hir::intravisit::walk_item(&mut visitor, parent_item);
+            }
+            hir::Node::Crate(m) => {
+                hir::intravisit::walk_mod(
+                    &mut visitor,
+                    m,
+                    tcx.local_def_id_to_hir_id(def_id.as_local().unwrap()),
+                );
             }
+            _ => break,
+        }
+        if let Some(i) = visitor.item {
+            item = i;
         } else {
             break;
         }
     }
 }
 
+fn filter_tokens_from_list(
+    args_tokens: TokenStream,
+    should_retain: impl Fn(&TokenTree) -> bool,
+) -> Vec<TokenTree> {
+    let mut tokens = Vec::with_capacity(args_tokens.len());
+    let mut skip_next_comma = false;
+    for token in args_tokens.into_trees() {
+        match token {
+            TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => {
+                skip_next_comma = false;
+            }
+            token if should_retain(&token) => {
+                skip_next_comma = false;
+                tokens.push(token);
+            }
+            _ => {
+                skip_next_comma = true;
+            }
+        }
+    }
+    tokens
+}
+
 /// When inlining items, we merge its attributes (and all the reexports attributes too) with the
 /// final reexport. For example:
 ///
-/// ```
+/// ```ignore (just an example)
 /// #[doc(hidden, cfg(feature = "foo"))]
 /// pub struct Foo;
 ///
@@ -2147,55 +2190,38 @@ fn get_all_import_attributes<'hir>(
 /// * `doc(no_inline)`
 /// * `doc(hidden)`
 fn add_without_unwanted_attributes(attrs: &mut Vec<ast::Attribute>, new_attrs: &[ast::Attribute]) {
-    use rustc_ast::token::{Token, TokenKind};
-    use rustc_ast::tokenstream::{TokenStream, TokenTree};
-
     for attr in new_attrs {
         let mut attr = attr.clone();
         match attr.kind {
             ast::AttrKind::Normal(ref mut normal) => {
-                if let [ident] = &*normal.item.path.segments {
-                    let ident = ident.ident.name;
-                    if ident == sym::doc {
-                        match normal.item.args {
-                            ast::AttrArgs::Delimited(ref mut args) => {
-                                let mut tokens = Vec::with_capacity(args.tokens.len());
-                                let mut skip_next_comma = false;
-                                for token in args.tokens.clone().into_trees() {
-                                    match token {
+                if let [ident] = &*normal.item.path.segments &&
+                    let ident = ident.ident.name &&
+                    ident == sym::doc
+                {
+                    match normal.item.args {
+                        ast::AttrArgs::Delimited(ref mut args) => {
+                            let tokens =
+                                filter_tokens_from_list(args.tokens.clone(), |token| {
+                                    !matches!(
+                                        token,
                                         TokenTree::Token(
                                             Token {
-                                                kind:
-                                                    TokenKind::Ident(
-                                                        sym::hidden | sym::inline | sym::no_inline,
-                                                        _,
-                                                    ),
+                                                kind: TokenKind::Ident(
+                                                    sym::hidden | sym::inline | sym::no_inline,
+                                                    _,
+                                                ),
                                                 ..
                                             },
                                             _,
-                                        ) => {
-                                            skip_next_comma = true;
-                                            continue;
-                                        }
-                                        TokenTree::Token(
-                                            Token { kind: TokenKind::Comma, .. },
-                                            _,
-                                        ) if skip_next_comma => {
-                                            skip_next_comma = false;
-                                            continue;
-                                        }
-                                        _ => {}
-                                    }
-                                    skip_next_comma = false;
-                                    tokens.push(token);
-                                }
-                                args.tokens = TokenStream::new(tokens);
-                                attrs.push(attr);
-                            }
-                            ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {
-                                attrs.push(attr);
-                                continue;
-                            }
+                                        ),
+                                    )
+                                });
+                            args.tokens = TokenStream::new(tokens);
+                            attrs.push(attr);
+                        }
+                        ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {
+                            attrs.push(attr);
+                            continue;
                         }
                     }
                 }
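
The comma handling that the new `filter_tokens_from_list` helper factors out can be illustrated outside of rustc: when an entry of a delimited `doc(...)` list (e.g. `hidden` in `doc(hidden, cfg(feature = "foo"))`) is rejected, the comma that follows it must be dropped too, or the rebuilt token stream would contain a dangling separator. The sketch below reproduces that logic over plain string tokens instead of `TokenTree`s; the function name `filter_list_entries` and the string-based tokens are illustrative stand-ins, not part of the patch.

```rust
/// Simplified stand-in for `filter_tokens_from_list`: drops rejected entries
/// from a comma-separated token list together with the comma that follows them.
fn filter_list_entries<'a>(
    tokens: Vec<&'a str>,
    should_retain: impl Fn(&str) -> bool,
) -> Vec<&'a str> {
    let mut kept = Vec::with_capacity(tokens.len());
    let mut skip_next_comma = false;
    for token in tokens {
        if token == "," && skip_next_comma {
            // The previous entry was removed, so its trailing comma goes too.
            skip_next_comma = false;
        } else if should_retain(token) {
            skip_next_comma = false;
            kept.push(token);
        } else {
            // Rejected entry: remember to swallow the comma that follows it.
            skip_next_comma = true;
        }
    }
    kept
}

fn main() {
    // `doc(hidden, cfg(feature = "foo"))` with `hidden` filtered out should
    // keep only `cfg(feature = "foo")`, with no leftover comma.
    let tokens = vec!["hidden", ",", r#"cfg(feature = "foo")"#];
    let kept = filter_list_entries(tokens, |t| t != "hidden");
    assert_eq!(kept, vec![r#"cfg(feature = "foo")"#]);
    println!("{:?}", kept);
}
```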