@@ -14,7 +14,7 @@ use rustc::hir::def_id::DefId;
 use rustc::middle::cstore::LOCAL_CRATE;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
-use rustc_serialize::{Encodable as RustcEncodable};
+use rustc_serialize::Encodable as RustcEncodable;
 use std::hash::{Hash, Hasher, SipHasher};
 use std::io::{self, Cursor, Write};
 use std::fs::{self, File};
@@ -35,8 +35,12 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     let mut hcx = HashContext::new(tcx);
     let mut builder = DefIdDirectoryBuilder::new(tcx);
     let query = tcx.dep_graph.query();
-    save_in(sess, dep_graph_path(tcx), |e| encode_dep_graph(&mut hcx, &mut builder, &query, e));
-    save_in(sess, metadata_hash_path(tcx, LOCAL_CRATE), |e| encode_metadata_hashes(&mut hcx, &mut builder, &query, e));
+    save_in(sess,
+            dep_graph_path(tcx),
+            |e| encode_dep_graph(&mut hcx, &mut builder, &query, e));
+    save_in(sess,
+            metadata_hash_path(tcx, LOCAL_CRATE),
+            |e| encode_metadata_hashes(&mut hcx, &mut builder, &query, e));
 }
 
 pub fn save_work_products(sess: &Session, local_crate_name: &str) {
@@ -46,26 +50,24 @@ pub fn save_work_products(sess: &Session, local_crate_name: &str) {
     save_in(sess, path, |e| encode_work_products(sess, e));
 }
 
-fn save_in<F>(sess: &Session,
-              opt_path_buf: Option<PathBuf>,
-              encode: F)
+fn save_in<F>(sess: &Session, opt_path_buf: Option<PathBuf>, encode: F)
     where F: FnOnce(&mut Encoder) -> io::Result<()>
 {
     let path_buf = match opt_path_buf {
         Some(p) => p,
-        None => return
+        None => return,
     };
 
     // FIXME(#32754) lock file?
 
     // delete the old dep-graph, if any
     if path_buf.exists() {
         match fs::remove_file(&path_buf) {
-            Ok(()) => { }
+            Ok(()) => {}
             Err(err) => {
-                sess.err(
-                    &format!("unable to delete old dep-graph at `{}`: {}",
-                             path_buf.display(), err));
+                sess.err(&format!("unable to delete old dep-graph at `{}`: {}",
+                                  path_buf.display(),
+                                  err));
                 return;
             }
         }
@@ -74,26 +76,23 @@ fn save_in<F>(sess: &Session,
     // generate the data in a memory buffer
     let mut wr = Cursor::new(Vec::new());
     match encode(&mut Encoder::new(&mut wr)) {
-        Ok(()) => { }
+        Ok(()) => {}
         Err(err) => {
-            sess.err(
-                &format!("could not encode dep-graph to `{}`: {}",
-                         path_buf.display(), err));
+            sess.err(&format!("could not encode dep-graph to `{}`: {}",
+                              path_buf.display(),
+                              err));
             return;
         }
     }
 
     // write the data out
     let data = wr.into_inner();
-    match
-        File::create(&path_buf)
-        .and_then(|mut file| file.write_all(&data))
-    {
-        Ok(_) => { }
+    match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) {
+        Ok(_) => {}
         Err(err) => {
-            sess.err(
-                &format!("failed to write dep-graph to `{}`: {}",
-                         path_buf.display(), err));
+            sess.err(&format!("failed to write dep-graph to `{}`: {}",
+                              path_buf.display(),
+                              err));
             return;
         }
     }
@@ -103,32 +102,33 @@ pub fn encode_dep_graph<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
                                   builder: &mut DefIdDirectoryBuilder,
                                   query: &DepGraphQuery<DefId>,
                                   encoder: &mut Encoder)
-                                  -> io::Result<()>
-{
+                                  -> io::Result<()> {
     let (nodes, edges) = (query.nodes(), query.edges());
 
     // Create hashes for inputs.
-    let hashes =
-        nodes.iter()
-             .filter_map(|dep_node| {
-                 hcx.hash(dep_node)
-                    .map(|(_, hash)| {
-                        let node = builder.map(dep_node);
-                        SerializedHash { node: node, hash: hash }
-                    })
-             })
-             .collect();
+    let hashes = nodes.iter()
+        .filter_map(|dep_node| {
+            hcx.hash(dep_node)
+                .map(|(_, hash)| {
+                    let node = builder.map(dep_node);
+                    SerializedHash {
+                        node: node,
+                        hash: hash,
+                    }
+                })
+        })
+        .collect();
 
     // Create the serialized dep-graph.
     let graph = SerializedDepGraph {
         nodes: nodes.iter().map(|node| builder.map(node)).collect(),
         edges: edges.iter()
-                    .map(|&(ref source_node, ref target_node)| {
-                        let source = builder.map(source_node);
-                        let target = builder.map(target_node);
-                        (source, target)
-                    })
-                    .collect(),
+            .map(|&(ref source_node, ref target_node)| {
+                let source = builder.map(source_node);
+                let target = builder.map(target_node);
+                (source, target)
+            })
+            .collect(),
         hashes: hashes,
     };
 
@@ -145,72 +145,65 @@ pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
                                         builder: &mut DefIdDirectoryBuilder,
                                         query: &DepGraphQuery<DefId>,
                                         encoder: &mut Encoder)
-                                        -> io::Result<()>
-{
+                                        -> io::Result<()> {
     let tcx = hcx.tcx;
 
     let serialized_hashes = {
         // Identify the `MetaData(X)` nodes where `X` is local. These are
        // the metadata items we export. Downstream crates will want to
         // see a hash that tells them whether we might have changed the
         // metadata for a given item since they last compiled.
-        let meta_data_def_ids =
-            query.nodes()
-                 .into_iter()
-                 .filter_map(|dep_node| match *dep_node {
-                     DepNode::MetaData(def_id) if def_id.is_local() => Some(def_id),
-                     _ => None,
-                 });
+        let meta_data_def_ids = query.nodes()
+            .into_iter()
+            .filter_map(|dep_node| match *dep_node {
+                DepNode::MetaData(def_id) if def_id.is_local() => Some(def_id),
+                _ => None,
+            });
 
         // To create the hash for each item `X`, we don't hash the raw
         // bytes of the metadata (though in principle we
         // could). Instead, we walk the predecessors of `MetaData(X)`
         // from the dep-graph. This corresponds to all the inputs that
         // were read to construct the metadata. To create the hash for
         // the metadata, we hash (the hash of) all of those inputs.
-        let hashes =
-            meta_data_def_ids
-            .map(|def_id| {
-                assert!(def_id.is_local());
-                let dep_node = DepNode::MetaData(def_id);
-                let mut state = SipHasher::new();
-                debug!("save: computing metadata hash for {:?}", dep_node);
-
-                let predecessors = query.transitive_predecessors(&dep_node);
-                let mut hashes: Vec<_> =
-                    predecessors.iter()
-                                .filter_map(|node| hcx.hash(&node))
-                                .map(|(def_id, hash)| {
-                                    let index = builder.add(def_id);
-                                    let path = builder.lookup_def_path(index);
-                                    (path.to_string(tcx), hash) // (*)
-                                })
-                                .collect();
-
-                // (*) creating a `String` from each def-path is a bit inefficient,
-                // but it's the easiest way to get a deterministic ord/hash.
-
-                hashes.sort();
-                state.write_usize(hashes.len());
-                for (path, hash) in hashes {
-                    debug!("save: predecessor {:?} has hash {}", path, hash);
-                    path.hash(&mut state);
-                    state.write_u64(hash.to_le());
-                }
-
-                let hash = state.finish();
-                debug!("save: metadata hash for {:?} is {}", dep_node, hash);
-
-                SerializedMetadataHash {
-                    def_index: def_id.index,
-                    hash: hash,
-                }
-            });
+        let hashes = meta_data_def_ids.map(|def_id| {
+            assert!(def_id.is_local());
+            let dep_node = DepNode::MetaData(def_id);
+            let mut state = SipHasher::new();
+            debug!("save: computing metadata hash for {:?}", dep_node);
+
+            let predecessors = query.transitive_predecessors(&dep_node);
+            let mut hashes: Vec<_> = predecessors.iter()
+                .filter_map(|node| hcx.hash(&node))
+                .map(|(def_id, hash)| {
+                    let index = builder.add(def_id);
+                    let path = builder.lookup_def_path(index);
+                    (path.to_string(tcx), hash) // (*)
+                })
+                .collect();
+
+            // (*) creating a `String` from each def-path is a bit inefficient,
+            // but it's the easiest way to get a deterministic ord/hash.
+
+            hashes.sort();
+            state.write_usize(hashes.len());
+            for (path, hash) in hashes {
+                debug!("save: predecessor {:?} has hash {}", path, hash);
+                path.hash(&mut state);
+                state.write_u64(hash.to_le());
+            }
+
+            let hash = state.finish();
+            debug!("save: metadata hash for {:?} is {}", dep_node, hash);
+
+            SerializedMetadataHash {
+                def_index: def_id.index,
+                hash: hash,
+            }
+        });
 
         // Collect these up into a vector.
-        SerializedMetadataHashes {
-            hashes: hashes.collect()
-        }
+        SerializedMetadataHashes { hashes: hashes.collect() }
     };
 
     // Encode everything.
@@ -219,21 +212,17 @@ pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
     Ok(())
 }
 
-pub fn encode_work_products(sess: &Session,
-                            encoder: &mut Encoder)
-                            -> io::Result<()>
-{
-    let work_products: Vec<_> =
-        sess.dep_graph.work_products()
-            .iter()
-            .map(|(id, work_product)| {
-                SerializedWorkProduct {
-                    id: id.clone(),
-                    work_product: work_product.clone(),
-                }
-            })
-            .collect();
+pub fn encode_work_products(sess: &Session, encoder: &mut Encoder) -> io::Result<()> {
+    let work_products: Vec<_> = sess.dep_graph
+        .work_products()
+        .iter()
+        .map(|(id, work_product)| {
+            SerializedWorkProduct {
+                id: id.clone(),
+                work_product: work_product.clone(),
+            }
+        })
+        .collect();
 
     work_products.encode(encoder)
 }
-