 import argparse
 import csv
+import io
 import itertools
 import json
 import os
 import sys
 import time
 import urllib
-import urllib2
+if sys.version_info[0] < 3:
+    import urllib2
+    Request = urllib2.Request
+    URLOpen = urllib2.urlopen
+else:
+    import urllib.request, urllib.parse, urllib.error
+    Request = urllib.request.Request
+    URLOpen = urllib.request.urlopen
 from collections import namedtuple
 from operator import attrgetter
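
The version check above funnels both interpreters through one pair of names, so call sites never have to branch on interpreter version. A minimal sketch of the same shim in isolation (the URL is illustrative, not part of the patch):

    import sys

    if sys.version_info[0] < 3:
        import urllib2
        Request = urllib2.Request
        URLOpen = urllib2.urlopen
    else:
        import urllib.request
        Request = urllib.request.Request
        URLOpen = urllib.request.urlopen

    # The same call site now runs on either interpreter.
    resp = URLOpen(Request('http://example.com'))
    print(resp.read()[:60])
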
@@ -49,7 +57,8 @@ def stat_name_minus_module(name):
 def vars_of_args(args):
     vargs = vars(args)
     if args.select_stats_from_csv_baseline is not None:
-        b = read_stats_dict_from_csv(args.select_stats_from_csv_baseline)
+        with io.open(args.select_stats_from_csv_baseline, 'r', encoding='utf-8') as f:
+            b = read_stats_dict_from_csv(f)
         # Sniff baseline stat-names to figure out if they're module-qualified
         # even when the user isn't asking us to _output_ module-grouped data.
         all_triples = all(len(k.split('.')) == 3 for k in b.keys())
@@ -105,18 +114,18 @@ def write_lnt_values(args):
             json.dump(j, args.output, indent=4)
         else:
             url = args.lnt_submit
-            print "\nsubmitting to LNT server: " + url
+            print("\nsubmitting to LNT server: " + url)
             json_report = {'input_data': json.dumps(j), 'commit': '1'}
             data = urllib.urlencode(json_report)
-            response_str = urllib2.urlopen(urllib2.Request(url, data))
+            response_str = URLOpen(Request(url, data))
             response = json.loads(response_str.read())
-            print "### response:"
-            print response
+            print("### response:")
+            print(response)
             if 'success' in response:
-                print "server response:\tSuccess"
+                print("server response:\tSuccess")
             else:
-                print "server response:\tError"
-                print "error:\t", response['error']
+                print("server response:\tError")
+                print("error:\t", response['error'])
                 sys.exit(1)

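Note that the hunk leaves `urllib.urlencode` on a context line: that spelling exists only on Python 2, and Python 3 moved it to `urllib.parse.urlencode` (where `urlopen` also wants POST bodies as bytes). A shim in the same style as `Request`/`URLOpen` could close the gap; `URLEncode` is a hypothetical name here, not part of this patch:

    import sys

    if sys.version_info[0] < 3:
        import urllib
        URLEncode = urllib.urlencode
    else:
        import urllib.parse

        def URLEncode(query):
            # Python 3's urlopen requires POST data as bytes, not str.
            return urllib.parse.urlencode(query).encode('utf-8')
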
@@ -187,8 +196,8 @@ def update_epoch_value(d, name, epoch, value):
             epoch = existing_epoch
         else:
             (_, delta_pct) = diff_and_pct(existing_value, value)
-            print ("note: changing value %d -> %d (%.2f%%) for %s" %
-                   (existing_value, value, delta_pct, name))
+            print("note: changing value %d -> %d (%.2f%%) for %s" %
+                  (existing_value, value, delta_pct, name))
             changed = 1
     d[name] = (epoch, value)
     return (epoch, value, changed)
@@ -233,22 +242,28 @@ def set_csv_baseline(args):
     existing = None
     vargs = vars_of_args(args)
     if os.path.exists(args.set_csv_baseline):
-        with open(args.set_csv_baseline, "r") as f:
+        with io.open(args.set_csv_baseline, "r", encoding='utf-8', newline='\n') as f:
             ss = vargs['select_stat']
             existing = read_stats_dict_from_csv(f, select_stat=ss)
-            print ("updating %d baseline entries in %s" %
-                   (len(existing), args.set_csv_baseline))
+            print("updating %d baseline entries in %s" %
+                  (len(existing), args.set_csv_baseline))
     else:
-        print "making new baseline " + args.set_csv_baseline
+        print("making new baseline " + args.set_csv_baseline)
     fieldnames = ["epoch", "name", "value"]
-    with open(args.set_csv_baseline, "wb") as f:
+
+    def _open(path):
+        if sys.version_info[0] < 3:
+            return open(path, 'wb')
+        return io.open(path, "w", encoding='utf-8', newline='\n')
+
+    with _open(args.set_csv_baseline) as f:
         out = csv.DictWriter(f, fieldnames, dialect='excel-tab',
                              quoting=csv.QUOTE_NONNUMERIC)
         m = merge_all_jobstats((s for d in args.remainder
                                 for s in load_stats_dir(d, **vargs)),
                                **vargs)
         if m is None:
-            print "no stats found"
+            print("no stats found")
             return 1
         changed = 0
         newepoch = int(time.time())
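
The `_open` helper exists because the csv module disagrees with itself across versions: on Python 2 the writer emits bytes and wants a binary file, while on Python 3 it emits text and should get a stream with newline translation disabled (the csv docs recommend `newline=''` for this). A standalone sketch of that split, with an illustrative path and `open_for_csv` name:

    import csv
    import io
    import sys

    def open_for_csv(path):
        if sys.version_info[0] < 3:
            # Python 2: csv writes bytes, so the file must be binary.
            return open(path, 'wb')
        # Python 3: csv writes text and manages its own row endings.
        return io.open(path, 'w', encoding='utf-8', newline='')

    with open_for_csv('baseline.csv') as f:
        out = csv.DictWriter(f, ['epoch', 'name', 'value'],
                             dialect='excel-tab',
                             quoting=csv.QUOTE_NONNUMERIC)
        out.writerow(dict(epoch=0, name='example', value=1))
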
@@ -265,7 +280,7 @@ def set_csv_baseline(args):
                               name=name,
                               value=int(value)))
     if existing is not None:
-        print "changed %d entries in baseline" % changed
+        print("changed %d entries in baseline" % changed)
     return 0

@@ -402,8 +417,8 @@ def keyfunc(e):

 def compare_to_csv_baseline(args):
     vargs = vars_of_args(args)
-    old_stats = read_stats_dict_from_csv(args.compare_to_csv_baseline,
-                                         select_stat=vargs['select_stat'])
+    with io.open(args.compare_to_csv_baseline, 'r', encoding='utf-8') as f:
+        old_stats = read_stats_dict_from_csv(f, select_stat=vargs['select_stat'])
     m = merge_all_jobstats((s for d in args.remainder
                            for s in load_stats_dir(d, **vargs)),
                            **vargs)
@@ -585,7 +600,7 @@ def main():
                         action="append",
                         help="Select specific statistics")
     parser.add_argument("--select-stats-from-csv-baseline",
-                        type=argparse.FileType('rb', 0), default=None,
+                        type=str, default=None,
                         help="Select statistics present in a CSV baseline")
     parser.add_argument("--exclude-timers",
                         default=False,
@@ -635,8 +650,7 @@ def main():
                        help="summarize the 'incrementality' of a build")
     modes.add_argument("--set-csv-baseline", type=str, default=None,
                        help="Merge stats from a stats-dir into a CSV baseline")
-    modes.add_argument("--compare-to-csv-baseline",
-                       type=argparse.FileType('rb', 0), default=None,
+    modes.add_argument("--compare-to-csv-baseline", type=str, default=None,
                        metavar="BASELINE.csv",
                        help="Compare stats dir to named CSV baseline")
     modes.add_argument("--compare-stats-dirs",
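
Both option changes above trade `argparse.FileType('rb', 0)`, which eagerly hands the program an open, unbuffered binary stream, for a plain path string, so each reader can open the file itself with an explicit UTF-8 encoding as the earlier hunks do. A minimal sketch of the pattern (option and file names illustrative):

    import argparse
    import io

    parser = argparse.ArgumentParser()
    # Accept a path rather than an open file; the encoding is then
    # chosen at the point of use, identically on Python 2 and 3.
    parser.add_argument('--baseline', type=str, default=None)
    args = parser.parse_args(['--baseline', 'baseline.csv'])

    if args.baseline is not None:
        with io.open(args.baseline, 'r', encoding='utf-8') as f:
            print(f.readline().rstrip())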