|
50 | 50 | "cell_type": "code",
|
51 | 51 | "execution_count": null,
|
52 | 52 | "metadata": {
|
53 |
| - "collapsed": true, |
54 | 53 | "isConfigCell": true
|
55 | 54 | },
|
56 | 55 | "outputs": [],
|
|
69 | 68 | "assumed_role = boto3.client('sts').get_caller_identity()['Arn']\n",
|
70 | 69 | "role = re.sub(r'^(.+)sts::(\\d+):assumed-role/(.+?)/.*$', r'\\1iam::\\2:role/\\3', assumed_role)\n",
|
71 | 70 | "\n",
|
72 |
| - "kms_key_id = '<your_kms_key_arn_here>'\n", |
| 71 | + "kms_key_id = '<your-kms-key-id>'\n", |
73 | 72 | "\n",
|
74 |
| - "bucket='<your_s3_bucket_name_here>' # put your s3 bucket name here, and create s3 bucket\n", |
75 |
| - "prefix = 'sagemarker/kms-new'\n", |
| 73 | + "bucket='<s3-bucket>' # put your s3 bucket name here, and create s3 bucket\n", |
| 74 | + "prefix = 'sagemaker/kms'\n", |
76 | 75 | "# customize to your bucket where you have stored the data\n",
|
77 | 76 | "bucket_path = 'https://s3-{}.amazonaws.com/{}'.format(region,bucket)"
|
78 | 77 | ]
|
|
90 | 89 | "We, first, read the dataset from an existing repository into memory. This processing could be done *in situ* by Amazon Athena, Apache Spark in Amazon EMR, Amazon Redshift, etc., assuming the dataset is present in the appropriate location. Then, the next step would be to transfer the data to S3 for use in training. For small datasets, such as the one used below, reading into memory isn't onerous, though it would be for larger datasets."
|
91 | 90 | ]
|
92 | 91 | },
|
93 |
| - { |
94 |
| - "cell_type": "code", |
95 |
| - "execution_count": null, |
96 |
| - "metadata": { |
97 |
| - "collapsed": true |
98 |
| - }, |
99 |
| - "outputs": [], |
100 |
| - "source": [ |
101 |
| - "!conda install -y -c conda-forge scikit-learn" |
102 |
| - ] |
103 |
| - }, |
104 | 92 | {
|
105 | 93 | "cell_type": "code",
|
106 | 94 | "execution_count": null,
|
|
189 | 177 | {
|
190 | 178 | "cell_type": "code",
|
191 | 179 | "execution_count": null,
|
192 |
| - "metadata": { |
193 |
| - "collapsed": true |
194 |
| - }, |
| 180 | + "metadata": {}, |
195 | 181 | "outputs": [],
|
196 | 182 | "source": [
|
197 | 183 | "s3 = boto3.client('s3')\n",
|
|
248 | 234 | {
|
249 | 235 | "cell_type": "code",
|
250 | 236 | "execution_count": null,
|
251 |
| - "metadata": { |
252 |
| - "collapsed": true |
253 |
| - }, |
| 237 | + "metadata": {}, |
254 | 238 | "outputs": [],
|
255 | 239 | "source": [
|
256 | 240 | "%%time\n",
|
|
299 | 283 | " \"S3DataDistributionType\": \"FullyReplicated\"\n",
|
300 | 284 | " }\n",
|
301 | 285 | " },\n",
|
302 |
| - " \"ContentType\": \"libsvm\",\n", |
| 286 | + " \"ContentType\": \"csv\",\n", |
303 | 287 | " \"CompressionType\": \"None\"\n",
|
304 | 288 | " },\n",
|
305 | 289 | " {\n",
|
|
320 | 304 | "client = boto3.client('sagemaker')\n",
|
321 | 305 | "client.create_training_job(**create_training_params)\n",
|
322 | 306 | "\n",
|
323 |
| - "status = client.describe_training_job(TrainingJobName=job_name)['TrainingJobStatus']\n", |
324 |
| - "print(status)\n", |
325 |
| - "while status !='Completed' and status!='Failed':\n", |
326 |
| - " time.sleep(60)\n", |
327 |
| - " status = client.describe_training_job(TrainingJobName=job_name)['TrainingJobStatus']\n", |
328 |
| - " print(status)" |
| 307 | + "try:\n", |
| 308 | + " # wait for the job to finish and report the ending status\n", |
| 309 | + "    client.get_waiter('training_job_completed_or_stopped').wait(TrainingJobName=job_name)\n", |
| 310 | + " training_info = client.describe_training_job(TrainingJobName=job_name)\n", |
| 311 | + " status = training_info['TrainingJobStatus']\n", |
| 312 | + " print(\"Training job ended with status: \" + status)\n", |
| 313 | + "except:\n", |
| 314 | + " print('Training failed to start')\n", |
| 315 | + " # if exception is raised, that means it has failed\n", |
| 316 | + " message = client.describe_training_job(TrainingJobName=job_name)['FailureReason']\n", |
| 317 | + " print('Training failed with the following error: {}'.format(message))" |
329 | 318 | ]
|
330 | 319 | },
|
331 | 320 | {
|
|
343 | 332 | {
|
344 | 333 | "cell_type": "code",
|
345 | 334 | "execution_count": null,
|
346 |
| - "metadata": { |
347 |
| - "collapsed": true |
348 |
| - }, |
| 335 | + "metadata": {}, |
349 | 336 | "outputs": [],
|
350 | 337 | "source": [
|
351 | 338 | "%%time\n",
|
|
384 | 371 | {
|
385 | 372 | "cell_type": "code",
|
386 | 373 | "execution_count": null,
|
387 |
| - "metadata": { |
388 |
| - "collapsed": true |
389 |
| - }, |
| 374 | + "metadata": {}, |
390 | 375 | "outputs": [],
|
391 | 376 | "source": [
|
392 | 377 | "from time import gmtime, strftime\n",
|
|
416 | 401 | {
|
417 | 402 | "cell_type": "code",
|
418 | 403 | "execution_count": null,
|
419 |
| - "metadata": { |
420 |
| - "collapsed": true |
421 |
| - }, |
| 404 | + "metadata": {}, |
422 | 405 | "outputs": [],
|
423 | 406 | "source": [
|
424 | 407 | "%%time\n",
|
|
431 | 414 | " EndpointConfigName=endpoint_config_name)\n",
|
432 | 415 | "print(create_endpoint_response['EndpointArn'])\n",
|
433 | 416 | "\n",
|
434 |
| - "resp = client.describe_endpoint(EndpointName=endpoint_name)\n", |
435 |
| - "status = resp['EndpointStatus']\n", |
436 |
| - "print(\"Status: \" + status)\n", |
437 | 417 | "\n",
|
438 |
| - "while status=='Creating':\n", |
439 |
| - " time.sleep(60)\n", |
440 |
| - " resp = client.describe_endpoint(EndpointName=endpoint_name)\n", |
441 |
| - " status = resp['EndpointStatus']\n", |
442 |
| - " print(\"Status: \" + status)\n", |
| 418 | + "print('EndpointArn = {}'.format(create_endpoint_response['EndpointArn']))\n", |
| 419 | + "\n", |
| 420 | + "# get the status of the endpoint\n", |
| 421 | + "response = client.describe_endpoint(EndpointName=endpoint_name)\n", |
| 422 | + "status = response['EndpointStatus']\n", |
| 423 | + "print('EndpointStatus = {}'.format(status))\n", |
| 424 | + "\n", |
443 | 425 | "\n",
|
444 |
| - "print(\"Arn: \" + resp['EndpointArn'])\n", |
445 |
| - "print(\"Status: \" + status)" |
| 426 | + "# wait until the status has changed\n", |
| 427 | + "client.get_waiter('endpoint_in_service').wait(EndpointName=endpoint_name)\n", |
| 428 | + "\n", |
| 429 | + "\n", |
| 430 | + "# print the status of the endpoint\n", |
| 431 | + "endpoint_response = client.describe_endpoint(EndpointName=endpoint_name)\n", |
| 432 | + "status = endpoint_response['EndpointStatus']\n", |
| 433 | + "print('Endpoint creation ended with EndpointStatus = {}'.format(status))\n", |
| 434 | + "\n", |
| 435 | + "if status != 'InService':\n", |
| 436 | + " raise Exception('Endpoint creation failed.')" |
446 | 437 | ]
|
447 | 438 | },
|
448 | 439 | {
|
|
508 | 499 | {
|
509 | 500 | "cell_type": "code",
|
510 | 501 | "execution_count": null,
|
511 |
| - "metadata": { |
512 |
| - "collapsed": true |
513 |
| - }, |
| 502 | + "metadata": {}, |
514 | 503 | "outputs": [],
|
515 | 504 | "source": [
|
516 | 505 | "%%time\n",
|
|
542 | 531 | {
|
543 | 532 | "cell_type": "code",
|
544 | 533 | "execution_count": null,
|
545 |
| - "metadata": { |
546 |
| - "collapsed": true |
547 |
| - }, |
| 534 | + "metadata": {}, |
548 | 535 | "outputs": [],
|
549 | 536 | "source": [
|
550 |
| - "#client.delete_endpoint(EndpointName=endpoint_name)" |
| 537 | + "# client.delete_endpoint(EndpointName=endpoint_name)" |
551 | 538 | ]
|
552 | 539 | }
|
553 | 540 | ],
|
|
0 commit comments