
Commit 4735754

python black --line-length=100 (aws#279)
1 parent 0dceec1 commit 4735754
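
This commit is a mechanical reformat of the repository with the black code formatter at a 100-character line length. As a rough guide only, a run of this kind can be reproduced with an invocation like the sketch below, written in the same subprocess style the repository's own build scripts use; it assumes black is installed in the current Python environment, and the exact command the author ran is not recorded in the commit. Running it a second time on already-formatted code should be a no-op, which is a convenient local check.

    # Hypothetical reproduction of the reformat (not part of the commit).
    # Assumes `black` is installed in the active environment.
    import subprocess
    import sys

    # Reformat every Python file under the current directory at line length 100.
    subprocess.check_call([sys.executable, "-m", "black", "--line-length=100", "."])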

164 files changed: 6774 additions & 4792 deletions


bin/build_binaries.py

Lines changed: 55 additions & 31 deletions
@@ -4,47 +4,71 @@
 import os
 import boto3
 
-parser = argparse.ArgumentParser(description='Build Tornasole binaries')
-parser.add_argument('--upload', default=False,
-                    dest='upload', action='store_true',
-                    help='Pass --upload if you want to upload the binaries'
-                         'built to the s3 location')
-parser.add_argument('--replace-latest', default=False,
-                    dest='replace_latest', action='store_true',
-                    help='Pass --replace-latest if you want to upload the new binary to '
-                         'replace the latest binary in the S3 location. Note that'
-                         'this also requires you to pass --upload')
+parser = argparse.ArgumentParser(description="Build Tornasole binaries")
+parser.add_argument(
+    "--upload",
+    default=False,
+    dest="upload",
+    action="store_true",
+    help="Pass --upload if you want to upload the binaries" "built to the s3 location",
+)
+parser.add_argument(
+    "--replace-latest",
+    default=False,
+    dest="replace_latest",
+    action="store_true",
+    help="Pass --replace-latest if you want to upload the new binary to "
+    "replace the latest binary in the S3 location. Note that"
+    "this also requires you to pass --upload",
+)
 args = parser.parse_args()
 exec(open("tornasole/_version.py").read())
 
 VERSION = __version__
-BINARIES = [
-    'mxnet',
-    'tensorflow',
-    'pytorch',
-    'xgboost',
-    'rules'
-]
+BINARIES = ["mxnet", "tensorflow", "pytorch", "xgboost", "rules"]
 
 for b in BINARIES:
-    if b == 'rules':
-        env_var = 'TORNASOLE_FOR_RULES'
+    if b == "rules":
+        env_var = "TORNASOLE_FOR_RULES"
     else:
-        env_var = 'TORNASOLE_WITH_' + b.upper()
+        env_var = "TORNASOLE_WITH_" + b.upper()
     env = dict(os.environ)
-    env[env_var] = '1'
-    subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel', '--universal'],
-                          env=env)
+    env[env_var] = "1"
+    subprocess.check_call([sys.executable, "setup.py", "bdist_wheel", "--universal"], env=env)
     if args.upload:
-        subprocess.check_call(['aws', 's3', 'cp', 'dist/tornasole-{}-py2.py3-none-any.whl'.format(VERSION),
-                               's3://tornasole-binaries-use1/tornasole_{}/py3/'.format(b)])
+        subprocess.check_call(
+            [
+                "aws",
+                "s3",
+                "cp",
+                "dist/tornasole-{}-py2.py3-none-any.whl".format(VERSION),
+                "s3://tornasole-binaries-use1/tornasole_{}/py3/".format(b),
+            ]
+        )
 
         if args.replace_latest:
             # upload current version
-            subprocess.check_call(['aws', 's3', 'cp',
-                                   's3://tornasole-binaries-use1/tornasole_{}/py3/tornasole-{}-py2.py3-none-any.whl'.format(b, VERSION),
-                                   's3://tornasole-binaries-use1/tornasole_{}/py3/latest/'.format(b)])
+            subprocess.check_call(
+                [
+                    "aws",
+                    "s3",
+                    "cp",
+                    "s3://tornasole-binaries-use1/tornasole_{}/py3/tornasole-{}-py2.py3-none-any.whl".format(
+                        b, VERSION
+                    ),
+                    "s3://tornasole-binaries-use1/tornasole_{}/py3/latest/".format(b),
+                ]
+            )
             # remove other versions
-            subprocess.check_call(['aws', 's3', 'rm', '--recursive', '--exclude', 'tornasole-{}*'.format(VERSION),
-                                   's3://tornasole-binaries-use1/tornasole_{}/py3/latest/'.format(b)])
-subprocess.check_call(['rm', '-rf', 'dist', 'build', '*.egg-info', '.eggs'])
+            subprocess.check_call(
+                [
+                    "aws",
+                    "s3",
+                    "rm",
+                    "--recursive",
+                    "--exclude",
+                    "tornasole-{}*".format(VERSION),
+                    "s3://tornasole-binaries-use1/tornasole_{}/py3/latest/".format(b),
+                ]
+            )
+subprocess.check_call(["rm", "-rf", "dist", "build", "*.egg-info", ".eggs"])
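
A side note, not part of the commit: the reformatted help text keeps the original implicit string concatenation, so the missing space between "binaries" and "built" carries over, since Python joins adjacent string literals with no separator. A minimal illustration of that behavior (hypothetical, for clarity only):

    # Adjacent string literals are concatenated at compile time, with nothing inserted between them.
    help_text = "Pass --upload if you want to upload the binaries" "built to the s3 location"
    assert help_text.endswith("binariesbuilt to the s3 location")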

bin/build_containers.py

Lines changed: 29 additions & 19 deletions
@@ -6,35 +6,42 @@
 
 
 FRAMEWORK_VERSIONS = {
-    'mxnet': '1.4.1',
-    'pytorch': '1.1.0',
-    'tensorflow': '1.14.0',
-    'xgboost': '0.90'
-}
+    "mxnet": "1.4.1",
+    "pytorch": "1.1.0",
+    "tensorflow": "1.14.0",
+    "xgboost": "0.90",
+}
 
 
 def run_command(command_list, stdout, stderr):
-    subprocess.check_call(command_list, stdout=stdout, stderr=stderr)
+    subprocess.check_call(command_list, stdout=stdout, stderr=stderr)
 
 
 # you can clean all caches used by docker with `docker system prune -a`
 def build_container(framework, version, args):
-    command = ['bash', 'bin/sagemaker-containers/{}/{}/build.sh'.format(framework, version), args.tag]
+    command = [
+        "bash",
+        "bin/sagemaker-containers/{}/{}/build.sh".format(framework, version),
+        args.tag,
+    ]
     if not args.single_process:
-        with open(os.path.join(args.logs_path, '{}.log'.format(framework)), "w") as logfile:
+        with open(os.path.join(args.logs_path, "{}.log".format(framework)), "w") as logfile:
             run_command(command, stdout=logfile, stderr=logfile)
     else:
         run_command(command, None, None)
 
 
-parser = argparse.ArgumentParser(description='Build Tornasole binaries')
-parser.add_argument('--tag', type=str, default='temp',
-                    help='Pass the tag to upload the image to ECR with. '
-                         'You might want to set the tag to latest for '
-                         'final images.')
-parser.add_argument('--single-process', action='store_true',
-                    dest='single_process', default=False)
-parser.add_argument('--logs-path', type=str, default='bin/sagemaker-containers/logs/')
+parser = argparse.ArgumentParser(description="Build Tornasole binaries")
+parser.add_argument(
+    "--tag",
+    type=str,
+    default="temp",
+    help="Pass the tag to upload the image to ECR with. "
+    "You might want to set the tag to latest for "
+    "final images.",
+)
+parser.add_argument("--single-process", action="store_true", dest="single_process", default=False)
+parser.add_argument("--logs-path", type=str, default="bin/sagemaker-containers/logs/")
 args = parser.parse_args()
 
 if not os.path.exists(args.logs_path):
@@ -44,8 +51,11 @@ def build_container(framework, version, args):
 for f, v in FRAMEWORK_VERSIONS.items():
     p = Process(name=f, target=build_container, args=(f, v, args))
     p.start()
-    print('Started building container for {}. You can find the log at {}.log'
-          .format(f, os.path.join(args.logs_path, f)))
+    print(
+        "Started building container for {}. You can find the log at {}.log".format(
+            f, os.path.join(args.logs_path, f)
+        )
+    )
     if args.single_process:
         p.join()
     else:
@@ -59,6 +69,6 @@ def build_container(framework, version, args):
     for p in processes:
         if p not in ended_processes and not p.is_alive():
             p.join()
-            print(f'Finished process {p.name}')
+            print(f"Finished process {p.name}")
             ended_processes.add(p)
     sleep(3)

bin/sagemaker-containers/tensorflow/run_sagemaker.py

Lines changed: 27 additions & 21 deletions
@@ -3,33 +3,39 @@
 import os
 
 dir_path = os.path.dirname(os.path.realpath(__file__))
-train_script_path = os.path.join(dir_path, 'tf-train.py')
+train_script_path = os.path.join(dir_path, "tf-train.py")
 
-tag = os.environ.get('SM_TESTING_TAG', 'DEFAULTTAGWHICHWILLFAIL')
-available_versions = ['1.13.1']
+tag = os.environ.get("SM_TESTING_TAG", "DEFAULTTAGWHICHWILLFAIL")
+available_versions = ["1.13.1"]
 
 parser = argparse.ArgumentParser()
-parser.add_argument("--tf_version", default='1.13.1', help=f"one of {available_versions}")
+parser.add_argument("--tf_version", default="1.13.1", help=f"one of {available_versions}")
 args = parser.parse_args()
 version = args.tf_version
-assert version in ['1.13.1'], f"version={version} not in {available_versions}"
+assert version in ["1.13.1"], f"version={version} not in {available_versions}"
 
-estimator = TensorFlow(entry_point=train_script_path,
-                       image_name=f'072677473360.dkr.ecr.us-east-1.amazonaws.com/tornasole-preprod-tf-{version}-cpu:' + tag,
-                       role='AmazonSageMaker-ExecutionRole-20190614T145575', # hardcode role name
-                       base_job_name='tornasole', #there are some restrictions on base job name so keep it simple
-                       train_instance_count=1,
-                       py_version='py3',
-                       framework_version=version,
-                       train_instance_type='ml.m4.xlarge')
+estimator = TensorFlow(
+    entry_point=train_script_path,
+    image_name=f"072677473360.dkr.ecr.us-east-1.amazonaws.com/tornasole-preprod-tf-{version}-cpu:"
+    + tag,
+    role="AmazonSageMaker-ExecutionRole-20190614T145575",  # hardcode role name
+    base_job_name="tornasole",  # there are some restrictions on base job name so keep it simple
+    train_instance_count=1,
+    py_version="py3",
+    framework_version=version,
+    train_instance_type="ml.m4.xlarge",
+)
 estimator.fit()
 
-estimator = TensorFlow(entry_point=train_script_path,
-                       image_name=f'072677473360.dkr.ecr.us-east-1.amazonaws.com/tornasole-preprod-tf-{version}-gpu:' + tag,
-                       role='AmazonSageMaker-ExecutionRole-20190614T145575', # hardcode role name
-                       base_job_name='tornasole', #there are some restrictions on base job name so keep it simple
-                       train_instance_count=1,
-                       py_version='py3',
-                       framework_version=version,
-                       train_instance_type='ml.p2.xlarge')
+estimator = TensorFlow(
+    entry_point=train_script_path,
+    image_name=f"072677473360.dkr.ecr.us-east-1.amazonaws.com/tornasole-preprod-tf-{version}-gpu:"
+    + tag,
+    role="AmazonSageMaker-ExecutionRole-20190614T145575",  # hardcode role name
+    base_job_name="tornasole",  # there are some restrictions on base job name so keep it simple
+    train_instance_count=1,
+    py_version="py3",
+    framework_version=version,
+    train_instance_type="ml.p2.xlarge",
+)
 estimator.fit()

bin/sagemaker-containers/tensorflow/tf-train.py

Lines changed: 28 additions & 25 deletions
@@ -5,46 +5,49 @@
 import time
 import uuid
 from tornasole.tensorflow import TornasoleHook, TornasoleOptimizer, SaveConfig
+
 parser = argparse.ArgumentParser()
-parser.add_argument('--lr', type=float, help="Learning Rate", default=0.001 )
-parser.add_argument('--steps', type=int, help="Number of steps to run", default=100 )
-parser.add_argument('--scale', type=float, help="Scaling factor for inputs", default=1.0 )
-parser.add_argument('--tornasole_frequency', type=float, help="How often to save TS data", default=10 )
-parser.add_argument('--run_name', type=str, help="Run Name", default=str(uuid.uuid4()) )
-parser.add_argument('--local_reductions', nargs='+', type=str, default=[] )
+parser.add_argument("--lr", type=float, help="Learning Rate", default=0.001)
+parser.add_argument("--steps", type=int, help="Number of steps to run", default=100)
+parser.add_argument("--scale", type=float, help="Scaling factor for inputs", default=1.0)
+parser.add_argument(
+    "--tornasole_frequency", type=float, help="How often to save TS data", default=10
+)
+parser.add_argument("--run_name", type=str, help="Run Name", default=str(uuid.uuid4()))
+parser.add_argument("--local_reductions", nargs="+", type=str, default=[])
 # running in Tf estimator mode, script need to accept --model_dir parameter
-parser.add_argument('--model_dir', type=str, help="model dir", default=str(uuid.uuid4()) )
+parser.add_argument("--model_dir", type=str, help="model dir", default=str(uuid.uuid4()))
 args = parser.parse_args()
 # Network definition
-with tf.name_scope('foobar'):
+with tf.name_scope("foobar"):
     x = tf.placeholder(shape=(None, 2), dtype=tf.float32)
-    w = tf.Variable(initial_value=[[10.], [10.]])
-    with tf.name_scope('foobaz'):
-        w0 = [[1], [1.]]
+    w = tf.Variable(initial_value=[[10.0], [10.0]])
+    with tf.name_scope("foobaz"):
+        w0 = [[1], [1.0]]
         y = tf.matmul(x, w0)
 loss = tf.reduce_mean((tf.matmul(x, w) - y) ** 2, name="loss")
 global_step = tf.Variable(17, name="global_step", trainable=False)
-increment_global_step_op = tf.assign(global_step, global_step+1)
+increment_global_step_op = tf.assign(global_step, global_step + 1)
 optimizer = tf.train.AdamOptimizer(args.lr)
 optimizer = TornasoleOptimizer(optimizer)
 optimizer_op = optimizer.minimize(loss, global_step=increment_global_step_op)
 graph = tf.get_default_graph()
 list_of_tuples = [op.outputs for op in graph.get_operations()]
 t = str(time.time())
-hook = TornasoleHook("s3://tornasolecodebuildtest/container_testing/ts_outputs/tf"+t,
-                     save_config=SaveConfig(save_interval=10))
+hook = TornasoleHook(
+    "s3://tornasolecodebuildtest/container_testing/ts_outputs/tf" + t,
+    save_config=SaveConfig(save_interval=10),
+)
 sess = tf.train.MonitoredSession(hooks=[hook])
 for i in range(args.steps):
-    x_ = np.random.random((10, 2)) * args.scale
-    _loss, opt, gstep = sess.run([loss, optimizer_op, increment_global_step_op], {x: x_})
-    print (f'Step={i}, Loss={_loss}')
-
-
+    x_ = np.random.random((10, 2)) * args.scale
+    _loss, opt, gstep = sess.run([loss, optimizer_op, increment_global_step_op], {x: x_})
+    print(f"Step={i}, Loss={_loss}")
 
 
-#from tornasole.trials import create_trial
-#tr = create_trial('s3://tornasolecodebuildtest/container_testing/ts_outputs/tf'+t)
-#from tornasole.rules.generic import VanishingGradient
-#r = VanishingGradient(tr)
-#from tornasole.rules.rule_invoker import invoke_rule
-#invoke_rule(r, start_step=0, end_step=80)
+# from tornasole.trials import create_trial
+# tr = create_trial('s3://tornasolecodebuildtest/container_testing/ts_outputs/tf'+t)
+# from tornasole.rules.generic import VanishingGradient
+# r = VanishingGradient(tr)
+# from tornasole.rules.rule_invoker import invoke_rule
+# invoke_rule(r, start_step=0, end_step=80)

0 commit comments
