import argparse
import csv
+ import io
import itertools
import json
import os
import sys
import time
import urllib
- import urllib2
from collections import namedtuple
from operator import attrgetter

from jobstats import (list_stats_dir_profiles,
                      load_stats_dir, merge_all_jobstats)

+ if sys.version_info[0] < 3:
+     import urllib2
+     Request = urllib2.Request
+     URLOpen = urllib2.urlopen
+ else:
+     import urllib.request
+     import urllib.parse
+     import urllib.error
+     Request = urllib.request.Request
+     URLOpen = urllib.request.urlopen

MODULE_PAT = re.compile(r'^(\w+)\.')

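
The conditional block added above gives the rest of the script two version-neutral names, Request and URLOpen, rather than referring to urllib2 directly. A minimal standalone sketch of the same pattern (the URL below is a placeholder, not something taken from this script):

    import sys

    if sys.version_info[0] < 3:
        from urllib2 import Request, urlopen as URLOpen
    else:
        from urllib.request import Request, urlopen as URLOpen

    # Both branches expose the same two names, so calling code is identical
    # on Python 2 and Python 3.
    req = Request('http://example.com/status')  # placeholder URL
    body = URLOpen(req).read()
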
@@ -49,7 +59,8 @@ def stat_name_minus_module(name):
def vars_of_args(args):
    vargs = vars(args)
    if args.select_stats_from_csv_baseline is not None:
-         b = read_stats_dict_from_csv(args.select_stats_from_csv_baseline)
+         with io.open(args.select_stats_from_csv_baseline, 'r', encoding='utf-8') as f:
+             b = read_stats_dict_from_csv(f)
        # Sniff baseline stat-names to figure out if they're module-qualified
        # even when the user isn't asking us to _output_ module-grouped data.
        all_triples = all(len(k.split('.')) == 3 for k in b.keys())
@@ -105,18 +116,18 @@ def write_lnt_values(args):
        json.dump(j, args.output, indent=4)
    else:
        url = args.lnt_submit
-         print "\nsubmitting to LNT server: " + url
+         print("\nsubmitting to LNT server: " + url)
        json_report = {'input_data': json.dumps(j), 'commit': '1'}
        data = urllib.urlencode(json_report)
-         response_str = urllib2.urlopen(urllib2.Request(url, data))
+         response_str = URLOpen(Request(url, data))
        response = json.loads(response_str.read())
-         print "### response:"
-         print response
+         print("### response:")
+         print(response)
        if 'success' in response:
-             print "server response:\tSuccess"
+             print("server response:\tSuccess")
        else:
-             print "server response:\tError"
-             print "error:\t", response['error']
+             print("server response:\tError")
+             print("error:\t", response['error'])
            sys.exit(1)

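
Under Python 3, urlopen only accepts a bytes POST body and form encoding moves to urllib.parse, so a fully version-neutral submission would also need an alias for urlencode. A hedged sketch of that idea (post_report and its arguments are illustrative names, not part of this script):

    import json
    import sys

    if sys.version_info[0] < 3:
        from urllib import urlencode
        from urllib2 import Request, urlopen as URLOpen
    else:
        from urllib.parse import urlencode
        from urllib.request import Request, urlopen as URLOpen


    def post_report(url, report):
        # urlencode returns text on both versions; encoding it keeps Python 3's
        # urlopen (which wants bytes) and Python 2's (which accepts str) happy.
        data = urlencode({'input_data': json.dumps(report), 'commit': '1'})
        response = URLOpen(Request(url, data.encode('utf-8')))
        return json.loads(response.read().decode('utf-8'))
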
@@ -187,8 +198,8 @@ def update_epoch_value(d, name, epoch, value):
            epoch = existing_epoch
        else:
            (_, delta_pct) = diff_and_pct(existing_value, value)
-             print ("note: changing value %d -> %d (%.2f%%) for %s" %
-                    (existing_value, value, delta_pct, name))
+             print("note: changing value %d -> %d (%.2f%%) for %s" %
+                   (existing_value, value, delta_pct, name))
            changed = 1
    d[name] = (epoch, value)
    return (epoch, value, changed)
@@ -233,22 +244,28 @@ def set_csv_baseline(args):
    existing = None
    vargs = vars_of_args(args)
    if os.path.exists(args.set_csv_baseline):
-         with open(args.set_csv_baseline, "r") as f:
+         with io.open(args.set_csv_baseline, "r", encoding='utf-8', newline='\n') as f:
            ss = vargs['select_stat']
            existing = read_stats_dict_from_csv(f, select_stat=ss)
-             print ("updating %d baseline entries in %s" %
-                    (len(existing), args.set_csv_baseline))
+             print("updating %d baseline entries in %s" %
+                   (len(existing), args.set_csv_baseline))
    else:
-         print "making new baseline " + args.set_csv_baseline
+         print("making new baseline " + args.set_csv_baseline)
    fieldnames = ["epoch", "name", "value"]
-     with open(args.set_csv_baseline, "wb") as f:
+
+     def _open(path):
+         if sys.version_info[0] < 3:
+             return open(path, 'wb')
+         return io.open(path, "w", encoding='utf-8', newline='\n')
+
+     with _open(args.set_csv_baseline) as f:
        out = csv.DictWriter(f, fieldnames, dialect='excel-tab',
                             quoting=csv.QUOTE_NONNUMERIC)
        m = merge_all_jobstats((s for d in args.remainder
                                for s in load_stats_dir(d, **vargs)),
                               **vargs)
        if m is None:
-             print "no stats found"
+             print("no stats found")
            return 1
        changed = 0
        newepoch = int(time.time())
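
The _open helper above reflects a real difference in the csv module: Python 2's writer expects a binary file, while Python 3's expects a text file opened with an explicit newline setting. A small self-contained illustration of the same split (the file name is just an example):

    import csv
    import io
    import sys


    def open_for_csv_write(path):
        # Python 2's csv writer wants a byte stream; Python 3's wants text.
        if sys.version_info[0] < 3:
            return open(path, 'wb')
        return io.open(path, 'w', encoding='utf-8', newline='\n')


    with open_for_csv_write('example-baseline.csv') as f:
        out = csv.DictWriter(f, ['epoch', 'name', 'value'], dialect='excel-tab',
                             quoting=csv.QUOTE_NONNUMERIC)
        out.writeheader()
        out.writerow({'epoch': 0, 'name': 'example.stat', 'value': 1})
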
@@ -265,7 +282,7 @@ def set_csv_baseline(args):
                              name=name,
                              value=int(value)))
    if existing is not None:
-         print "changed %d entries in baseline" % changed
+         print("changed %d entries in baseline" % changed)
    return 0

@@ -402,8 +419,8 @@ def keyfunc(e):

def compare_to_csv_baseline(args):
    vargs = vars_of_args(args)
-     old_stats = read_stats_dict_from_csv(args.compare_to_csv_baseline,
-                                          select_stat=vargs['select_stat'])
+     with io.open(args.compare_to_csv_baseline, 'r', encoding='utf-8') as f:
+         old_stats = read_stats_dict_from_csv(f, select_stat=vargs['select_stat'])
    m = merge_all_jobstats((s for d in args.remainder
                            for s in load_stats_dir(d, **vargs)),
                           **vargs)
@@ -585,7 +602,7 @@ def main():
                        action="append",
                        help="Select specific statistics")
    parser.add_argument("--select-stats-from-csv-baseline",
-                         type=argparse.FileType('rb', 0), default=None,
+                         type=str, default=None,
                        help="Select statistics present in a CSV baseline")
    parser.add_argument("--exclude-timers",
                        default=False,
@@ -635,8 +652,7 @@ def main():
                       help="summarize the 'incrementality' of a build")
    modes.add_argument("--set-csv-baseline", type=str, default=None,
                       help="Merge stats from a stats-dir into a CSV baseline")
-     modes.add_argument("--compare-to-csv-baseline",
-                        type=argparse.FileType('rb', 0), default=None,
+     modes.add_argument("--compare-to-csv-baseline", type=str, default=None,
                       metavar="BASELINE.csv",
                       help="Compare stats dir to named CSV baseline")
    modes.add_argument("--compare-stats-dirs",
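
Taking these options as plain strings instead of argparse.FileType('rb', 0) means argparse never opens the file itself, so the script can open the path later with an explicit encoding. A hedged sketch of that pattern (the option name and file name here are illustrative only):

    import argparse
    import io

    parser = argparse.ArgumentParser()
    # Accept the path as a plain string; do not let argparse open the file.
    parser.add_argument("--baseline", type=str, default=None,
                        metavar="BASELINE.csv")
    args = parser.parse_args(["--baseline", "example-baseline.csv"])

    if args.baseline is not None:
        # Open lazily, with an explicit encoding, only when it is needed.
        with io.open(args.baseline, 'r', encoding='utf-8') as f:
            first_line = f.readline()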