16
16
# `swiftc -stats-output-dir` and emits summary data, traces etc. for analysis.
17
17
18
18
import argparse
19
+ import csv
19
20
import json
20
21
import os
21
22
import random
@@ -38,6 +39,9 @@ def __init__(self, jobkind, jobid, module, start_usec, dur_usec,
38
39
def is_driver_job (self ):
39
40
return self .jobkind == 'driver'
40
41
42
def is_frontend_job(self):
    """Return True if this stats record came from a 'frontend' job."""
    return self.jobkind == 'frontend'
44
+
41
45
def driver_jobs_ran (self ):
42
46
assert (self .is_driver_job ())
43
47
return self .stats .get ("Driver.NumDriverJobsRun" , 0 )
@@ -72,7 +76,7 @@ def incrementality_percentage(self):
72
76
assert (self .is_driver_job ())
73
77
ran = self .driver_jobs_ran ()
74
78
total = self .driver_jobs_total ()
75
- return ( float (ran ) / float (total )) * 100.0
79
+ return round (( float (ran ) / float (total )) * 100.0 , 2 )
76
80
77
81
# Return a JSON-formattable object of the form preferred by google chrome's
78
82
# 'catapult' trace-viewer.
@@ -108,22 +112,26 @@ def load_stats_dir(path):
108
112
patstr = (r"time\.swift-" + jobkind +
109
113
r"\.(?P<module>[^\.]+)(?P<filename>.*)\.wall$" )
110
114
pat = re .compile (patstr )
115
+ stats = dict ()
111
116
for (k , v ) in j .items ():
117
+ if k .startswith ("time." ):
118
+ v = int (1000000.0 * float (v ))
119
+ stats [k ] = v
112
120
tm = re .match (pat , k )
113
121
if tm :
114
122
tmg = tm .groupdict ()
115
- dur_usec = int ( 1000000.0 * float ( v ))
123
+ dur_usec = v
116
124
module = tmg ['module' ]
117
125
if 'filename' in tmg :
118
126
ff = tmg ['filename' ]
119
127
if ff .startswith ('.' ):
120
128
ff = ff [1 :]
121
129
jobargs = [ff ]
122
- break
130
+
123
131
e = JobStats (jobkind = jobkind , jobid = jobid ,
124
132
module = module , start_usec = start_usec ,
125
133
dur_usec = dur_usec , jobargs = jobargs ,
126
- stats = j )
134
+ stats = stats )
127
135
jobstats .append (e )
128
136
return jobstats
129
137
@@ -168,39 +176,81 @@ def merge_all_jobstats(jobstats):
168
176
169
177
170
178
def show_paired_incrementality(args):
    """Write a tab-separated comparison of driver incrementality for each
    pair of stats-dirs produced by load_paired_stats_dirs(args).

    One row per pair: old/new incrementality percentages, old/new skipped
    job counts, and their deltas, written to args.output.
    """
    columns = ["old_pct", "old_skip",
               "new_pct", "new_skip",
               "delta_pct", "delta_skip",
               "name"]
    writer = csv.DictWriter(args.output, columns, dialect='excel-tab')
    writer.writeheader()

    for (name, (oldstats, newstats)) in load_paired_stats_dirs(args):
        merged_old = merge_all_jobstats([j for j in oldstats
                                         if j.is_driver_job()])
        merged_new = merge_all_jobstats([j for j in newstats
                                         if j.is_driver_job()])
        # Skip pairs where either side has no driver jobs to merge.
        if merged_old is None or merged_new is None:
            continue
        pct_before = merged_old.incrementality_percentage()
        pct_after = merged_new.incrementality_percentage()
        skip_before = merged_old.driver_jobs_skipped()
        skip_after = merged_new.driver_jobs_skipped()
        writer.writerow(dict(name=name,
                             old_pct=pct_before, old_skip=skip_before,
                             new_pct=pct_after, new_skip=skip_after,
                             delta_pct=pct_after - pct_before,
                             delta_skip=skip_after - skip_before))
195
203
196
204
197
205
def show_incrementality(args):
    """Write each stats-dir's driver incrementality percentage to
    args.output as tab-separated rows (one per driver job found)."""
    writer = csv.DictWriter(args.output, ["incrementality", "name"],
                            dialect='excel-tab')
    writer.writeheader()

    for path in args.remainder:
        # basename is loop-invariant per path; compute it once.
        base = os.path.basename(path)
        for job in load_stats_dir(path):
            if not job.is_driver_job():
                continue
            writer.writerow(
                dict(name=base,
                     incrementality=job.incrementality_percentage()))
217
+
218
+
219
def compare_frontend_stats(args):
    """Compare merged frontend-job stats between two stats-dirs.

    args.remainder must contain exactly two paths: the old stats-dir and
    the new stats-dir.  Writes a tab-separated report (old value, new
    value, delta percentage, stat name) to args.output for every stat
    whose change exceeds the configured thresholds, and returns the
    number of regressions (stats whose value increased).
    """
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would let a malformed invocation fall through silently.
    if len(args.remainder) != 2:
        raise ValueError("compare_frontend_stats requires exactly "
                         "two stats-dirs")
    (olddir, newdir) = args.remainder

    regressions = 0
    fieldnames = ["old", "new", "delta_pct", "name"]
    out = csv.DictWriter(args.output, fieldnames, dialect='excel-tab')
    out.writeheader()

    old_merged = merge_all_jobstats([x for x in load_stats_dir(olddir)
                                     if x.is_frontend_job()])
    new_merged = merge_all_jobstats([x for x in load_stats_dir(newdir)
                                     if x.is_frontend_job()])
    # Nothing to compare if either side had no frontend jobs.
    if old_merged is None or new_merged is None:
        return regressions

    for stat_name in sorted(old_merged.stats.keys()):
        if stat_name not in new_merged.stats:
            continue
        old = old_merged.stats[stat_name]
        # Membership was checked above, so index directly; the original
        # `.get(stat_name, 0)` fallback here was unreachable.
        new = new_merged.stats[stat_name]
        # A zero on either side makes the percentage meaningless; skip.
        if old == 0 or new == 0:
            continue
        delta = new - old
        # NOTE(review): the delta is expressed relative to the *new*
        # value, not the old one — confirm this is the intended
        # convention before relying on the percentages.
        delta_pct = round((float(delta) / float(new)) * 100.0, 2)
        # Timer counters are noisy: ignore small absolute swings on any
        # "time."-prefixed stat regardless of percentage.
        if (stat_name.startswith("time.") and
                abs(delta) < args.delta_usec_thresh):
            continue
        if abs(delta_pct) < args.delta_pct_thresh:
            continue
        out.writerow(dict(name=stat_name, old=old, new=new,
                          delta_pct=delta_pct))
        if delta > 0:
            regressions += 1
    return regressions
204
254
205
255
206
256
def main ():
@@ -212,26 +262,33 @@ def main():
212
262
help = "Write output to file" )
213
263
parser .add_argument ("--paired" , action = "store_true" ,
214
264
help = "Process two dirs-of-stats-dirs, pairwise" )
215
- parser .add_argument ("--csv" , action = "store_true" ,
216
- help = "Write output as CSV" )
265
+ parser .add_argument ("--delta-pct-thresh" , type = float , default = 0.01 ,
266
+ help = "Percentage change required to report" )
267
+ parser .add_argument ("--delta-usec-thresh" , type = int , default = 100000 ,
268
+ help = "Absolute delta on times required to report" )
217
269
modes = parser .add_mutually_exclusive_group (required = True )
218
270
modes .add_argument ("--catapult" , action = "store_true" ,
219
271
help = "emit a 'catapult'-compatible trace of events" )
220
272
modes .add_argument ("--incrementality" , action = "store_true" ,
221
273
help = "summarize the 'incrementality' of a build" )
274
+ modes .add_argument ("--compare-frontend-stats" , action = "store_true" ,
275
+ help = "Compare frontend stats from two stats-dirs" )
222
276
parser .add_argument ('remainder' , nargs = argparse .REMAINDER ,
223
277
help = "stats-dirs to process" )
224
278
225
279
args = parser .parse_args ()
226
280
if len (args .remainder ) == 0 :
227
281
parser .print_help ()
228
- sys . exit ( 1 )
282
+ return 1
229
283
if args .catapult :
230
284
write_catapult_trace (args )
285
+ elif args .compare_frontend_stats :
286
+ return compare_frontend_stats (args )
231
287
elif args .incrementality :
232
288
if args .paired :
233
289
show_paired_incrementality (args )
234
290
else :
235
291
show_incrementality (args )
292
+ return None
236
293
237
# Guard the entry point so importing this module does not run (and
# exit) the tool; propagate main()'s return value as the process
# exit status.
if __name__ == "__main__":
    sys.exit(main())
0 commit comments