@@ -16,6 +16,7 @@
 import os
 import pickle
 import sys
+import pytest
 
 import numpy as np
 
@@ -30,7 +31,7 @@
     PCA,
     RandomCutForest,
 )
-from sagemaker.amazon.amazon_estimator import registry
+from sagemaker.amazon.amazon_estimator import get_image_uri
 from sagemaker.amazon.common import read_records
 from sagemaker.chainer import Chainer
 from sagemaker.estimator import Estimator
@@ -65,13 +66,11 @@
 SINGLE_INSTANCE_COUNT = 1
 
 
+@pytest.mark.canary_quick
 def test_byo_airflow_config_uploads_data_source_to_s3_when_inputs_provided(
     sagemaker_session, cpu_instance_type
 ):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
-        image_name = (
-            registry(sagemaker_session.boto_session.region_name) + "/factorization-machines:1"
-        )
         training_data_path = os.path.join(DATA_DIR, "dummy_tensor")
 
         data_source_location = "test-airflow-config-{}".format(sagemaker_timestamp())
@@ -80,7 +79,9 @@ def test_byo_airflow_config_uploads_data_source_to_s3_when_inputs_provided(
         )
 
         estimator = Estimator(
-            image_name=image_name,
+            image_name=get_image_uri(
+                sagemaker_session.boto_session.region_name, "factorization-machines"
+            ),
             role=ROLE,
             train_instance_count=SINGLE_INSTANCE_COUNT,
             train_instance_type=cpu_instance_type,
@@ -95,6 +96,7 @@ def test_byo_airflow_config_uploads_data_source_to_s3_when_inputs_provided(
         )
 
 
+@pytest.mark.canary_quick
 def test_kmeans_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -132,6 +134,7 @@ def test_kmeans_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_
         )
 
 
+@pytest.mark.canary_quick
 def test_fm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -164,6 +167,7 @@ def test_fm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_inst
         )
 
 
+@pytest.mark.canary_quick
 def test_ipinsights_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         data_path = os.path.join(DATA_DIR, "ipinsights")
@@ -193,6 +197,7 @@ def test_ipinsights_airflow_config_uploads_data_source_to_s3(sagemaker_session,
         )
 
 
+@pytest.mark.canary_quick
 def test_knn_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -222,6 +227,7 @@ def test_knn_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
         )
 
 
+@pytest.mark.canary_quick
 def test_lda_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         data_path = os.path.join(DATA_DIR, "lda")
@@ -252,6 +258,7 @@ def test_lda_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
         )
 
 
+@pytest.mark.canary_quick
 def test_linearlearner_airflow_config_uploads_data_source_to_s3(
     sagemaker_session, cpu_instance_type
 ):
@@ -320,6 +327,7 @@ def test_linearlearner_airflow_config_uploads_data_source_to_s3(
         )
 
 
+@pytest.mark.canary_quick
 def test_ntm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         data_path = os.path.join(DATA_DIR, "ntm")
@@ -351,6 +359,7 @@ def test_ntm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
         )
 
 
+@pytest.mark.canary_quick
 def test_pca_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -382,6 +391,7 @@ def test_pca_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
         )
 
 
+@pytest.mark.canary_quick
 def test_rcf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         # Generate a thousand 14-dimensional datapoints.
@@ -408,6 +418,7 @@ def test_rcf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
         )
 
 
+@pytest.mark.canary_quick
 def test_chainer_airflow_config_uploads_data_source_to_s3(sagemaker_session, chainer_full_version):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
         script_path = os.path.join(DATA_DIR, "chainer_mnist", "mnist.py")
@@ -441,6 +452,7 @@ def test_chainer_airflow_config_uploads_data_source_to_s3(sagemaker_session, cha
         )
 
 
+@pytest.mark.canary_quick
 def test_mxnet_airflow_config_uploads_data_source_to_s3(
     sagemaker_session, cpu_instance_type, mxnet_full_version
 ):
@@ -469,6 +481,7 @@ def test_mxnet_airflow_config_uploads_data_source_to_s3(
         )
 
 
+@pytest.mark.canary_quick
 def test_sklearn_airflow_config_uploads_data_source_to_s3(
     sagemaker_session, cpu_instance_type, sklearn_full_version
 ):
@@ -503,14 +516,13 @@ def test_sklearn_airflow_config_uploads_data_source_to_s3(
         )
 
 
+@pytest.mark.canary_quick
 def test_tf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
-        image_name = (
-            registry(sagemaker_session.boto_session.region_name) + "/factorization-machines:1"
-        )
-
         tf = TensorFlow(
-            image_name=image_name,
+            image_name=get_image_uri(
+                sagemaker_session.boto_session.region_name, "factorization-machines"
+            ),
             entry_point=SCRIPT,
             role=ROLE,
             train_instance_count=SINGLE_INSTANCE_COUNT,
@@ -535,6 +547,7 @@ def test_tf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_inst
         )
 
 
+@pytest.mark.canary_quick
 def test_xgboost_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
     with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
@@ -559,6 +572,7 @@ def test_xgboost_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu
         )
 
 
+@pytest.mark.canary_quick
 def test_pytorch_airflow_config_uploads_data_source_to_s3_when_inputs_not_provided(
     sagemaker_session, cpu_instance_type
 ):
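
A note on the image-name change in this diff: in SageMaker Python SDK 1.x, get_image_uri() resolves the full regional ECR image URI for a built-in algorithm, replacing the deprecated registry() string concatenation removed above. A minimal sketch, assuming sagemaker 1.x; the region string is illustrative:

    from sagemaker.amazon.amazon_estimator import get_image_uri

    # Old pattern (removed in this diff): build the URI by hand from the
    # registry account plus a hard-coded "/repo:tag" suffix.
    #   image_name = registry("us-west-2") + "/factorization-machines:1"

    # New pattern: the SDK resolves account, region, repo name, and the
    # default image tag ("1") in one call.
    image_uri = get_image_uri("us-west-2", "factorization-machines")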
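
The other change applies pytest.mark.canary_quick to each test. canary_quick is a marker defined by this repository, not a pytest built-in; a hypothetical conftest.py sketch of how such a marker can be registered (whether this repo registers it this way is an assumption):

    # conftest.py (hypothetical): register the custom marker so pytest
    # does not emit unknown-mark warnings when these tests are collected.
    def pytest_configure(config):
        config.addinivalue_line(
            "markers", "canary_quick: quick canary tests suitable for frequent runs"
        )

Running only the marked tests then looks like: pytest -m canary_quick tests/integ/test_airflow_config.py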