Commit 6084a45

rename python sdk (aws#9)
1 parent 8c07c82 commit 6084a45

27 files changed: 80 additions, 80 deletions

im-python-sdk/1P_kmeans_highlevel/kmeans_mnist.ipynb renamed to sagemaker-python-sdk/1P_kmeans_highlevel/kmeans_mnist.ipynb

Lines changed: 6 additions & 6 deletions
@@ -59,7 +59,7 @@
 },
 "outputs": [],
 "source": [
-"role='<your IM execution role here>'\n",
+"role='<your SageMaker execution role here>'\n",
 "bucket='<bucket-name>'"
 ]
 },
@@ -139,13 +139,13 @@
 "source": [
 "%%time\n",
 "import io\n",
-"import im.kmeans\n",
+"import sagemaker.kmeans\n",
 "\n",
 "vectors = [t.tolist() for t in train_set[0]]\n",
 "labels = [t.tolist() for t in train_set[1]]\n",
 "\n",
 "buf = io.BytesIO()\n",
-"im.kmeans.write_data_as_pb_recordio(vectors, labels, buf)\n",
+"sagemaker.kmeans.write_data_as_pb_recordio(vectors, labels, buf)\n",
 "buf.seek(0)"
 ]
 },
@@ -200,7 +200,7 @@
 },
 "outputs": [],
 "source": [
-"from im.kmeans import KMeans\n",
+"from sagemaker.kmeans import KMeans\n",
 "\n",
 "kmeans = KMeans(role=role,\n",
 " train_instance_count=2,\n",
@@ -341,9 +341,9 @@
 },
 "outputs": [],
 "source": [
-"import im\n",
+"import sagemaker\n",
 "\n",
-"im.Session().delete_endpoint(kmeans_predictor.endpoint)"
+"sagemaker.Session().delete_endpoint(kmeans_predictor.endpoint)"
 ]
 },
 {

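Taken together, the renamed calls in this notebook make up the high-level SDK flow: serialize the training set to protobuf RecordIO, construct a `KMeans` estimator, and later remove the hosted endpoint through a `Session`. The sketch below assembles the lines that appear in this diff; the stand-in MNIST arrays, the estimator arguments other than `role` and `train_instance_count`, and the `fit`/`deploy` steps are assumptions, not part of the diff.

```python
import io

import numpy as np
import sagemaker
import sagemaker.kmeans
from sagemaker.kmeans import KMeans

role = '<your SageMaker execution role here>'
bucket = '<bucket-name>'

# Stand-in for the MNIST arrays the notebook loads (images flattened to 784 floats);
# random data keeps the sketch self-contained.
images = np.random.rand(100, 784).astype('float32')
digits = np.random.randint(0, 10, size=100).astype('float32')

# Serialize vectors and labels into the protobuf RecordIO format expected by the
# built-in KMeans algorithm, as the renamed data cell does.
vectors = [t.tolist() for t in images]
labels = [t.tolist() for t in digits]
buf = io.BytesIO()
sagemaker.kmeans.write_data_as_pb_recordio(vectors, labels, buf)
buf.seek(0)

# High-level estimator; only role and train_instance_count appear in the diff,
# the remaining keyword arguments are illustrative assumptions.
kmeans = KMeans(role=role,
                train_instance_count=2,
                train_instance_type='c4.8xlarge',             # assumption
                output_path='s3://{}/kmeans'.format(bucket),  # assumption
                k=10)                                         # assumption

# The notebook then calls fit() and deploy(), which yield kmeans_predictor;
# the last renamed cell removes the endpoint it created:
#   sagemaker.Session().delete_endpoint(kmeans_predictor.endpoint)
```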
im-python-sdk/1P_kmeans_lowlevel/kmeans_mnist_lowlevel.ipynb renamed to sagemaker-python-sdk/1P_kmeans_lowlevel/kmeans_mnist_lowlevel.ipynb

Lines changed: 16 additions & 16 deletions
@@ -58,7 +58,7 @@
 },
 "outputs": [],
 "source": [
-"role='<your IM execution role here>'\n",
+"role='<your SageMaker execution role here>'\n",
 "bucket='<bucket-name>'"
 ]
 },
@@ -170,7 +170,7 @@
 "%%time\n",
 "import io\n",
 "\n",
-"# Convert the training data into the format required by the IM KMeans algorithm\n",
+"# Convert the training data into the format required by the SageMaker KMeans algorithm\n",
 "buf = io.BytesIO()\n",
 "write_data_as_pb_recordio(train_set, buf)\n",
 "buf.seek(0)"
@@ -261,17 +261,17 @@
 "}\n",
 "\n",
 "\n",
-"im = boto3.client('im')\n",
+"sagemaker = boto3.client('sagemaker')\n",
 "\n",
-"im.create_training_job(**create_training_params)\n",
+"sagemaker.create_training_job(**create_training_params)\n",
 "\n",
-"status = im.describe_training_job(TrainingJobName=job_name)['TrainingJobStatus']\n",
+"status = sagemaker.describe_training_job(TrainingJobName=job_name)['TrainingJobStatus']\n",
 "print(status)\n",
-"im.get_waiter('TrainingJob_Created').wait(TrainingJobName=job_name)\n",
-"status = im.describe_training_job(TrainingJobName=job_name)['TrainingJobStatus']\n",
+"sagemaker.get_waiter('TrainingJob_Created').wait(TrainingJobName=job_name)\n",
+"status = sagemaker.describe_training_job(TrainingJobName=job_name)['TrainingJobStatus']\n",
 "print(\"Training job ended with status: \" + status)\n",
 "if status == 'Failed':\n",
-" message = im.describe_training_job(TrainingJobName=job_name)['FailureReason']\n",
+" message = sagemaker.describe_training_job(TrainingJobName=job_name)['FailureReason']\n",
 " print('Training failed with the following error: {}'.format(message))\n",
 " raise Exception('Training job failed')\n"
 ]
@@ -303,15 +303,15 @@
 "model_name=job_name\n",
 "print(model_name)\n",
 "\n",
-"info = im.describe_training_job(TrainingJobName=job_name)\n",
+"info = sagemaker.describe_training_job(TrainingJobName=job_name)\n",
 "model_data = info['ModelArtifacts']['S3ModelArtifacts']\n",
 "\n",
 "primary_container = {\n",
 " 'Image': \"900597767885.dkr.ecr.us-east-1.amazonaws.com/kmeanswebscale:latest\",\n",
 " 'ModelDataUrl': model_data\n",
 "}\n",
 "\n",
-"create_model_response = im.create_model(\n",
+"create_model_response = sagemaker.create_model(\n",
 " ModelName = model_name,\n",
 " ExecutionRoleArn = role,\n",
 " PrimaryContainer = primary_container)\n",
@@ -339,7 +339,7 @@
 "\n",
 "endpoint_config_name = 'KMeansEndpointConfig-' + strftime(\"%Y-%m-%d-%H-%M-%S\", gmtime())\n",
 "print(endpoint_config_name)\n",
-"create_endpoint_config_response = im.create_endpoint_config(\n",
+"create_endpoint_config_response = sagemaker.create_endpoint_config(\n",
 " EndpointConfigName = endpoint_config_name,\n",
 " ProductionVariants=[{\n",
 " 'InstanceType':'c4.xlarge',\n",
@@ -370,18 +370,18 @@
 "\n",
 "endpoint_name = 'KMeansEndpoint-' + strftime(\"%Y-%m-%d-%H-%M-%S\", gmtime())\n",
 "print(endpoint_name)\n",
-"create_endpoint_response = im.create_endpoint(\n",
+"create_endpoint_response = sagemaker.create_endpoint(\n",
 " EndpointName=endpoint_name,\n",
 " EndpointConfigName=endpoint_config_name)\n",
 "print(create_endpoint_response['EndpointArn'])\n",
 "\n",
-"resp = im.describe_endpoint(EndpointName=endpoint_name)\n",
+"resp = sagemaker.describe_endpoint(EndpointName=endpoint_name)\n",
 "status = resp['EndpointStatus']\n",
 "print(\"Status: \" + status)\n",
 "\n",
-"im.get_waiter('Endpoint_Created').wait(EndpointName=endpoint_name)\n",
+"sagemaker.get_waiter('Endpoint_Created').wait(EndpointName=endpoint_name)\n",
 "\n",
-"resp = im.describe_endpoint(EndpointName=endpoint_name)\n",
+"resp = sagemaker.describe_endpoint(EndpointName=endpoint_name)\n",
 "status = resp['EndpointStatus']\n",
 "print(\"Arn: \" + resp['EndpointArn'])\n",
 "print(\"Create endpoint ended with status: \" + status)\n",
@@ -504,7 +504,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"im.delete_endpoint(EndpointName=endpoint_name)"
+"sagemaker.delete_endpoint(EndpointName=endpoint_name)"
 ]
 },
 {

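The low-level notebook drives the same workflow directly through the boto3 client that this commit renames from `im` to `sagemaker`. The condensed sketch below strings the renamed calls together in the order they appear in the hunks above; `create_training_params`, `job_name`, and `role` come from earlier notebook cells that are not part of this diff, the `ProductionVariants` fields other than `InstanceType` are assumptions, and the waiter names are the ones this snapshot of the notebook uses.

```python
import boto3
from time import gmtime, strftime

sagemaker = boto3.client('sagemaker')

# Start the training job described by create_training_params (built in an earlier
# cell) and wait for it to finish, as the notebook does.
sagemaker.create_training_job(**create_training_params)
sagemaker.get_waiter('TrainingJob_Created').wait(TrainingJobName=job_name)
status = sagemaker.describe_training_job(TrainingJobName=job_name)['TrainingJobStatus']
if status == 'Failed':
    message = sagemaker.describe_training_job(TrainingJobName=job_name)['FailureReason']
    raise Exception('Training job failed: {}'.format(message))

# Register the trained artifacts as a model, reusing the job name.
info = sagemaker.describe_training_job(TrainingJobName=job_name)
model_name = job_name
sagemaker.create_model(
    ModelName=model_name,
    ExecutionRoleArn=role,
    PrimaryContainer={
        'Image': '900597767885.dkr.ecr.us-east-1.amazonaws.com/kmeanswebscale:latest',
        'ModelDataUrl': info['ModelArtifacts']['S3ModelArtifacts']})

# Describe how the model should be hosted, then create the endpoint and wait for it.
endpoint_config_name = 'KMeansEndpointConfig-' + strftime('%Y-%m-%d-%H-%M-%S', gmtime())
sagemaker.create_endpoint_config(
    EndpointConfigName=endpoint_config_name,
    ProductionVariants=[{'VariantName': 'AllTraffic',   # assumption
                         'ModelName': model_name,       # assumption
                         'InitialInstanceCount': 1,     # assumption
                         'InstanceType': 'c4.xlarge'}])

endpoint_name = 'KMeansEndpoint-' + strftime('%Y-%m-%d-%H-%M-%S', gmtime())
sagemaker.create_endpoint(EndpointName=endpoint_name,
                          EndpointConfigName=endpoint_config_name)
sagemaker.get_waiter('Endpoint_Created').wait(EndpointName=endpoint_name)

# Tear the endpoint down when finished, as in the notebook's last renamed cell.
sagemaker.delete_endpoint(EndpointName=endpoint_name)
```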
im-python-sdk/mxnet_gluon_cifar10/cifar10.ipynb renamed to sagemaker-python-sdk/mxnet_gluon_cifar10/cifar10.ipynb

Lines changed: 12 additions & 12 deletions
@@ -22,15 +22,15 @@
 "import credentials # put your credentials in credentials.py\n",
 "import os\n",
 "import boto3\n",
-"import im\n",
-"from im.mxnet import MXNet\n",
+"import sagemaker\n",
+"from sagemaker.mxnet import MXNet\n",
 "from mxnet import gluon\n",
 "\n",
-"ims = im.Session()\n",
+"sagemaker_session = sagemaker.Session()\n",
 "\n",
-"# Replace with a role that gives IM access to s3 and cloudwatch\n",
-"# see 1-Creating_a_role_allowing_IM_to_access_S3_Cloudwatch_ECR.ipynb\n",
-"role='IMRole'"
+"# Replace with a role that gives SageMaker access to S3 and CloudWatch\n",
+"# see 1-Creating_a_role_allowing_SageMaker_to_access_S3_CloudWatch_ECR.ipynb\n",
+"role='SageMakerRole'"
 ]
 },
 {
@@ -58,7 +58,7 @@
 "source": [
 "## Uploading the data\n",
 "\n",
-"We use the `im.Session.upload_data` function to upload our datasets to an S3 location. The return value `inputs` identifies the location -- we will use this later when we start the training job."
+"We use the `sagemaker.Session.upload_data` function to upload our datasets to an S3 location. The return value `inputs` identifies the location -- we will use this later when we start the training job."
 ]
 },
 {
@@ -67,7 +67,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"inputs = ims.upload_data(path='data', key_prefix='data/gluon-cifar10')\n",
+"inputs = sagemaker_session.upload_data(path='data', key_prefix='data/gluon-cifar10')\n",
 "print('input spec (in this case, just an S3 path): {}'.format(inputs))"
 ]
 },
@@ -77,7 +77,7 @@
 "source": [
 "## Implement the training function\n",
 "\n",
-"We need to provide a training script that can run on the IM platform. The training scripts are essentially the same as one you would write for local training, except that you need to provide a `train` function. When IM calls your function, it will pass in arguments that describe the training environment. Check the script below to see how this works.\n",
+"We need to provide a training script that can run on the SageMaker platform. The training scripts are essentially the same as one you would write for local training, except that you need to provide a `train` function. When SageMaker calls your function, it will pass in arguments that describe the training environment. Check the script below to see how this works.\n",
 "\n",
 "The network itself is a pre-built version contained in the [Gluon Model Zoo](https://mxnet.incubator.apache.org/versions/master/api/python/gluon/model_zoo.html)."
 ]
@@ -95,9 +95,9 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"## Run the training script on IM\n",
+"## Run the training script on SageMaker\n",
 "\n",
-"The ```MXNet``` class allows us to run our training function as a distributed training job on IM infrastructure. We need to configure it with our training script, an IAM role, the number of training instances, and the training instance type. In this case we will run our training job on four p2.xlarge instances. "
+"The ```MXNet``` class allows us to run our training function as a distributed training job on SageMaker infrastructure. We need to configure it with our training script, an IAM role, the number of training instances, and the training instance type. In this case we will run our training job on four p2.xlarge instances. "
 ]
 },
 {
@@ -122,7 +122,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"After we've constructed our `MXNet` object, we can fit it using the data we uploaded to S3. IM makes sure our data is available in the local filesystem, so our training script can simply read the data from disk."
+"After we've constructed our `MXNet` object, we can fit it using the data we uploaded to S3. SageMaker makes sure our data is available in the local filesystem, so our training script can simply read the data from disk."
 ]
 },
 {

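In the Gluon CIFAR-10 notebook the rename touches the session object, the estimator import, and the role placeholder. Below is a minimal sketch of how those renamed pieces fit together, assuming the estimator keyword arguments of the public SageMaker Python SDK; only the class and session names, the role placeholder, the upload call, and the "four p2.xlarge instances" wording come from this diff.

```python
import sagemaker
from sagemaker.mxnet import MXNet

sagemaker_session = sagemaker.Session()

# Replace with a role that gives SageMaker access to S3 and CloudWatch.
role = 'SageMakerRole'

# Upload the local ./data directory; `inputs` is the S3 location handed to fit().
inputs = sagemaker_session.upload_data(path='data', key_prefix='data/gluon-cifar10')
print('input spec (in this case, just an S3 path): {}'.format(inputs))

# Distributed training driven by cifar10.py's train() function on four p2.xlarge
# instances; the keyword arguments below follow the public SDK and are assumptions.
estimator = MXNet('cifar10.py',
                  role=role,
                  train_instance_count=4,
                  train_instance_type='ml.p2.xlarge',
                  sagemaker_session=sagemaker_session)
estimator.fit(inputs)
```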
im-python-sdk/mxnet_gluon_cifar10/cifar10.py renamed to sagemaker-python-sdk/mxnet_gluon_cifar10/cifar10.py

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ def train(current_host, hosts, num_cpus, num_gpus, channel_input_dirs, model_dir
 
     # load training and validation data
     # we use the gluon.data.vision.CIFAR10 class because of its built in pre-processing logic,
-    # but point it at the location where IM placed the data files, so it doesn't download them again.
+    # but point it at the location where SageMaker placed the data files, so it doesn't download them again.
     data_dir = channel_input_dirs['training']
     train_data = get_train_data(num_cpus, data_dir, batch_size, (3, 32, 32))
     test_data = get_test_data(num_cpus, data_dir, batch_size, (3, 32, 32))

im-python-sdk/mxnet_gluon_mnist/mnist.py renamed to sagemaker-python-sdk/mxnet_gluon_mnist/mnist.py

Lines changed: 2 additions & 2 deletions
@@ -17,7 +17,7 @@
 
 
 def train(channel_input_dirs, hyperparameters, **kwargs):
-    # IM passes num_cpus, num_gpus and other args we can use to tailor training to
+    # SageMaker passes num_cpus, num_gpus and other args we can use to tailor training to
     # the current container environment, but here we just use simple cpu context.
     ctx = mx.cpu()
 
@@ -30,7 +30,7 @@ def train(channel_input_dirs, hyperparameters, **kwargs):
 
     # load training and validation data
     # we use the gluon.data.vision.MNIST class because of its built in mnist pre-processing logic,
-    # but point it at the location where IM placed the data files, so it doesn't download them again.
+    # but point it at the location where SageMaker placed the data files, so it doesn't download them again.
     training_dir = channel_input_dirs['training']
    train_data = get_train_data(training_dir + '/train', batch_size)
    val_data = get_val_data(training_dir + '/test', batch_size)

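Both scripts follow the entry-point contract the notebooks describe: SageMaker imports the script and calls its `train()` function with arguments describing the container environment, and each input channel's data is already on local disk. Below is a minimal sketch of that pattern for the MNIST case; the signature, the CPU context, and the `channel_input_dirs['training']` lookup come from the hunks above, while the batch-size default and the Gluon data-loading details are assumptions.

```python
import mxnet as mx
from mxnet import gluon


def train(channel_input_dirs, hyperparameters, **kwargs):
    # SageMaker also passes num_cpus, num_gpus and other arguments describing the
    # container environment; like mnist.py, this sketch just uses a CPU context.
    ctx = mx.cpu()
    batch_size = hyperparameters.get('batch_size', 100)  # default value is an assumption

    # The 'training' channel was placed on local disk before train() was called, so
    # the Gluon dataset is pointed at that directory instead of downloading MNIST again.
    training_dir = channel_input_dirs['training']

    def transform(data, label):
        return data.astype('float32') / 255.0, label.astype('float32')

    train_data = gluon.data.DataLoader(
        gluon.data.vision.MNIST(training_dir + '/train', train=True, transform=transform),
        batch_size=batch_size, shuffle=True)
    val_data = gluon.data.DataLoader(
        gluon.data.vision.MNIST(training_dir + '/test', train=False, transform=transform),
        batch_size=batch_size, shuffle=False)

    # The real scripts go on to define the network (the Gluon Model Zoo for CIFAR-10),
    # run the training loop over train_data and val_data on ctx, and return the model.
```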
im-python-sdk/mxnet_gluon_mnist/mnist_with_gluon.ipynb renamed to sagemaker-python-sdk/mxnet_gluon_mnist/mnist_with_gluon.ipynb

Lines changed: 14 additions & 14 deletions
@@ -6,7 +6,7 @@
 "source": [
 "## MNIST Training with MXNet and Gluon\n",
 "\n",
-"MNIST is a widely used dataset for handwritten digit classification. It consists of 70,000 labeled 28x28 pixel grayscale images of hand-written digits. The dataset is split into 60,000 training images and 10,000 test images. There are 10 classes (one for each of the 10 digits). This tutorial will show how to train and test an MNIST model on IM using MXNet and the Gluon API.\n",
+"MNIST is a widely used dataset for handwritten digit classification. It consists of 70,000 labeled 28x28 pixel grayscale images of hand-written digits. The dataset is split into 60,000 training images and 10,000 test images. There are 10 classes (one for each of the 10 digits). This tutorial will show how to train and test an MNIST model on SageMaker using MXNet and the Gluon API.\n",
 "\n"
 ]
 },
@@ -21,16 +21,16 @@
 "import credentials # put your credentials in credentials.py\n",
 "import os\n",
 "import boto3\n",
-"import im\n",
-"from im.mxnet import MXNet\n",
+"import sagemaker\n",
+"from sagemaker.mxnet import MXNet\n",
 "from mxnet import gluon\n",
 "\n",
 "\n",
-"ims = im.Session()\n",
+"sagemaker_session = sagemaker.Session()\n",
 "\n",
-"# Replace with a role that gives IM access to s3 and cloudwatch\n",
-"# see 1-Creating_a_role_allowing_IM_to_access_S3_Cloudwatch_ECR.ipynb\n",
-"role='IMRole'"
+"# Replace with a role that gives SageMaker access to s3 and cloudwatch\n",
+"# see 1-Creating_a_role_allowing_SageMaker_to_access_S3_CloudWatch_ECR.ipynb\n",
+"role='SageMakerRole'"
 ]
 },
 {
@@ -58,7 +58,7 @@
 "source": [
 "## Uploading the data\n",
 "\n",
-"We use the `im.Session.upload_data` function to upload our datasets to an S3 location. The return value `inputs` identifies the location -- we will use this later when we start the training job."
+"We use the `sagemaker.Session.upload_data` function to upload our datasets to an S3 location. The return value `inputs` identifies the location -- we will use this later when we start the training job."
 ]
 },
 {
@@ -69,7 +69,7 @@
 },
 "outputs": [],
 "source": [
-"inputs = ims.upload_data(path='data', key_prefix='data/mnist')"
+"inputs = sagemaker_session.upload_data(path='data', key_prefix='data/mnist')"
 ]
 },
 {
@@ -78,7 +78,7 @@
 "source": [
 "## Implement the training function\n",
 "\n",
-"We need to provide a training script that can run on the IM platform. The training scripts are essentially the same as one you would write for local training, except that you need to provide a `train` function. When IM calls your function, it will pass in arguments that describe the training environment. Check the script below to see how this works.\n",
+"We need to provide a training script that can run on the SageMaker platform. The training scripts are essentially the same as one you would write for local training, except that you need to provide a `train` function. When SageMaker calls your function, it will pass in arguments that describe the training environment. Check the script below to see how this works.\n",
 "\n",
 "The script here is an adaptation of the [Gluon MNIST example](https://github.com/apache/incubator-mxnet/blob/master/example/gluon/mnist.py) provided by the [Apache MXNet](https://mxnet.incubator.apache.org/) project. "
 ]
@@ -98,9 +98,9 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"## Run the training script on IM\n",
+"## Run the training script on SageMaker\n",
 "\n",
-"The ```MXNet``` class allows us to run our training function on IM infrastructure. We need to configure it with our training script, an IAM role, the number of training instances, and the training instance type. In this case we will run our training job on a single c4.xlarge instance. "
+"The ```MXNet``` class allows us to run our training function on SageMaker infrastructure. We need to configure it with our training script, an IAM role, the number of training instances, and the training instance type. In this case we will run our training job on a single c4.xlarge instance. "
 ]
 },
 {
@@ -127,7 +127,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"After we've constructed our `MXNet` object, we can fit it using the data we uploaded to S3. IM makes sure our data is available in the local filesystem, so our training script can simply read the data from disk.\n"
+"After we've constructed our `MXNet` object, we can fit it using the data we uploaded to S3. SageMaker makes sure our data is available in the local filesystem, so our training script can simply read the data from disk.\n"
 ]
 },
 {
@@ -146,7 +146,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"After training, we use the MXNet object to build and deploy an MXNetPredictor object. This creates an IronMan-hosted prediction service that we can use to perform inference. \n",
+"After training, we use the MXNet object to build and deploy an MXNetPredictor object. This creates a SageMaker endpoint that we can use to perform inference. \n",
 "\n",
 "This allows us to perform inference on json encoded multi-dimensional arrays. "
 ]

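The final renamed cells describe deploying the trained model as an `MXNetPredictor` behind a SageMaker endpoint and sending it JSON-encoded multi-dimensional arrays. The deploy and predict cells themselves are not part of this diff, so the sketch below is an assumption based on the public SageMaker Python SDK; `estimator` stands for the fitted `MXNet` object from the training cell, and the instance type and sample payload are illustrative.

```python
import sagemaker

# `estimator` is the fitted sagemaker.mxnet.MXNet object from the training cell.
predictor = estimator.deploy(initial_instance_count=1,
                             instance_type='ml.c4.xlarge')

# The predictor serializes nested Python lists as JSON; a 28x28 grayscale image is
# sent as a 2-D list of floats (all zeros here, purely for illustration).
sample_image = [[0.0] * 28 for _ in range(28)]
print(predictor.predict(sample_image))

# Delete the endpoint when finished, mirroring the notebooks' clean-up cells.
sagemaker.Session().delete_endpoint(predictor.endpoint)
```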