#!/usr/bin/env python

from __future__ import absolute_import

import argparse
import itertools
import os

from sagemaker import Session
from sagemaker.estimator import Framework
from sagemaker.tensorflow import TensorFlow

# Bound method; called later to look up the account's default SageMaker S3 bucket.
default_bucket = Session().default_bucket
dir_path = os.path.dirname(os.path.realpath(__file__))

# Default settings forwarded to tf_cnn_benchmarks (ResNet-32 / CIFAR-10 configuration).
_DEFAULT_HYPERPARAMETERS = {
    'batch_size': 32,
    'model': 'resnet32',
    'num_epochs': 10,
    'data_format': 'NHWC',
    'summary_verbosity': 1,
    'save_summaries_steps': 10,
    'data_name': 'cifar10'
}


class ScriptModeTensorFlow(Framework):
    """This class is temporary until the final version of Script Mode is released."""

    __framework_name__ = "tensorflow-scriptmode-beta"

    create_model = TensorFlow.create_model

    def __init__(self, py_version='py3', **kwargs):
        super(ScriptModeTensorFlow, self).__init__(**kwargs)
        self.py_version = py_version
        # Leave image_name unset so the SDK resolves the Script Mode container image
        # from the framework name and version.
        self.image_name = None
        self.framework_version = '1.10.0'


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--instance-types', nargs='+', required=True,
                        help='One or more training instance types to benchmark, e.g. ml.p3.2xlarge')
    parser.add_argument('-r', '--role', required=True,
                        help='IAM role (name or ARN) to use for the training jobs')
    parser.add_argument('-w', '--wait', action='store_true',
                        help='Wait for each training job to finish before starting the next one')
    parser.add_argument('--region', default='us-west-2',
                        help='AWS region to run the benchmarks in')
    parser.add_argument('--py-versions', nargs='+', default=['py3'],
                        help='Python versions to benchmark (default: py3)')
    parser.add_argument('--checkpoint-path',
                        default='s3://%s/benchmarks/checkpoints' % default_bucket(),
                        help='The S3 location where the model checkpoints and TensorBoard events are saved after training')

    # Flags that argparse does not recognize are returned as a separate list and
    # forwarded to tf_cnn_benchmarks as hyperparameters.
    return parser.parse_known_args()


def main(args, script_args):
    for instance_type, py_version in itertools.product(args.instance_types, args.py_versions):
        # e.g. py_version 'py3' and instance type 'ml.p3.2xlarge' -> 'py3-p3-2xlarge'
        base_name = '%s-%s-%s' % (py_version, instance_type[3:5], instance_type[6:])
        model_dir = os.path.join(args.checkpoint_path, base_name)

        job_hps = create_hyperparameters(model_dir, script_args)

        print('hyperparameters:')
        print(job_hps)

        estimator = ScriptModeTensorFlow(
            entry_point='tf_cnn_benchmarks.py',
            role=args.role,
            source_dir=os.path.join(dir_path, 'tf_cnn_benchmarks'),
            base_job_name=base_name,
            py_version=py_version,
            train_instance_count=1,
            hyperparameters=job_hps,
            train_instance_type=instance_type,
        )

        input_dir = 's3://sagemaker-sample-data-%s/spark/mnist/train/' % args.region
        estimator.fit({'train': input_dir}, wait=args.wait)

    print("To use TensorBoard, execute the following command:")
    cmd = 'S3_USE_HTTPS=0 S3_VERIFY_SSL=0 AWS_REGION=%s tensorboard --host localhost --port 6006 --logdir %s'
    print(cmd % (args.region, args.checkpoint_path))


def create_hyperparameters(model_dir, script_args):
    job_hps = _DEFAULT_HYPERPARAMETERS.copy()

    job_hps.update({'train_dir': model_dir, 'eval_dir': model_dir})

    # script_args is the flat list of leftover command-line tokens, assumed to alternate
    # '--flag value'; strip the leading dash(es) and pair each flag with its value.
    script_arg_keys_without_dashes = [key[2:] if key.startswith('--') else key[1:] for key in script_args[::2]]
    script_arg_values = script_args[1::2]
    job_hps.update(dict(zip(script_arg_keys_without_dashes, script_arg_values)))

    return job_hps
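# Illustrative mapping (hypothetical flag values): script_args == ['--num_batches', '200', '-v', '1']
# would add {'num_batches': '200', 'v': '1'} on top of the defaults in _DEFAULT_HYPERPARAMETERS.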


if __name__ == '__main__':
    args, script_args = get_args()
    main(args, script_args)
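# Example invocation (hypothetical values; 'run_benchmarks.py' stands in for this script's
# file name). Flags the parser does not recognize, such as --num_batches here, are forwarded
# to tf_cnn_benchmarks as hyperparameters:
#
#   python run_benchmarks.py -r MySageMakerRole -t ml.p3.2xlarge --wait --num_batches 200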