Skip to content

Commit 7386979

Browse files
Authored commit message:
change: add airflow_config tests to canaries (#1055)
This change also addresses minor refactoring comments on the previous PR.
1 parent 453e939 commit 7386979

File tree

3 files changed

+29
-15
lines changed

3 files changed

+29
-15
lines changed

tests/integ/test_airflow_config.py

Lines changed: 24 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
import os
1717
import pickle
1818
import sys
19+
import pytest
1920

2021
import numpy as np
2122

@@ -30,7 +31,7 @@
3031
PCA,
3132
RandomCutForest,
3233
)
33-
from sagemaker.amazon.amazon_estimator import registry
34+
from sagemaker.amazon.amazon_estimator import get_image_uri
3435
from sagemaker.amazon.common import read_records
3536
from sagemaker.chainer import Chainer
3637
from sagemaker.estimator import Estimator
@@ -65,13 +66,11 @@
6566
SINGLE_INSTANCE_COUNT = 1
6667

6768

69+
@pytest.mark.canary_quick
6870
def test_byo_airflow_config_uploads_data_source_to_s3_when_inputs_provided(
6971
sagemaker_session, cpu_instance_type
7072
):
7173
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
72-
image_name = (
73-
registry(sagemaker_session.boto_session.region_name) + "/factorization-machines:1"
74-
)
7574
training_data_path = os.path.join(DATA_DIR, "dummy_tensor")
7675

7776
data_source_location = "test-airflow-config-{}".format(sagemaker_timestamp())
@@ -80,7 +79,9 @@ def test_byo_airflow_config_uploads_data_source_to_s3_when_inputs_provided(
8079
)
8180

8281
estimator = Estimator(
83-
image_name=image_name,
82+
image_name=get_image_uri(
83+
sagemaker_session.boto_session.region_name, "factorization-machines"
84+
),
8485
role=ROLE,
8586
train_instance_count=SINGLE_INSTANCE_COUNT,
8687
train_instance_type=cpu_instance_type,
@@ -95,6 +96,7 @@ def test_byo_airflow_config_uploads_data_source_to_s3_when_inputs_provided(
9596
)
9697

9798

99+
@pytest.mark.canary_quick
98100
def test_kmeans_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
99101
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
100102
data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -132,6 +134,7 @@ def test_kmeans_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_
132134
)
133135

134136

137+
@pytest.mark.canary_quick
135138
def test_fm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
136139
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
137140
data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -164,6 +167,7 @@ def test_fm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_inst
164167
)
165168

166169

170+
@pytest.mark.canary_quick
167171
def test_ipinsights_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
168172
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
169173
data_path = os.path.join(DATA_DIR, "ipinsights")
@@ -193,6 +197,7 @@ def test_ipinsights_airflow_config_uploads_data_source_to_s3(sagemaker_session,
193197
)
194198

195199

200+
@pytest.mark.canary_quick
196201
def test_knn_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
197202
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
198203
data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -222,6 +227,7 @@ def test_knn_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
222227
)
223228

224229

230+
@pytest.mark.canary_quick
225231
def test_lda_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
226232
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
227233
data_path = os.path.join(DATA_DIR, "lda")
@@ -252,6 +258,7 @@ def test_lda_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
252258
)
253259

254260

261+
@pytest.mark.canary_quick
255262
def test_linearlearner_airflow_config_uploads_data_source_to_s3(
256263
sagemaker_session, cpu_instance_type
257264
):
@@ -320,6 +327,7 @@ def test_linearlearner_airflow_config_uploads_data_source_to_s3(
320327
)
321328

322329

330+
@pytest.mark.canary_quick
323331
def test_ntm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
324332
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
325333
data_path = os.path.join(DATA_DIR, "ntm")
@@ -351,6 +359,7 @@ def test_ntm_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
351359
)
352360

353361

362+
@pytest.mark.canary_quick
354363
def test_pca_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
355364
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
356365
data_path = os.path.join(DATA_DIR, "one_p_mnist", "mnist.pkl.gz")
@@ -382,6 +391,7 @@ def test_pca_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
382391
)
383392

384393

394+
@pytest.mark.canary_quick
385395
def test_rcf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
386396
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
387397
# Generate a thousand 14-dimensional datapoints.
@@ -408,6 +418,7 @@ def test_rcf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_ins
408418
)
409419

410420

421+
@pytest.mark.canary_quick
411422
def test_chainer_airflow_config_uploads_data_source_to_s3(sagemaker_session, chainer_full_version):
412423
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
413424
script_path = os.path.join(DATA_DIR, "chainer_mnist", "mnist.py")
@@ -441,6 +452,7 @@ def test_chainer_airflow_config_uploads_data_source_to_s3(sagemaker_session, cha
441452
)
442453

443454

455+
@pytest.mark.canary_quick
444456
def test_mxnet_airflow_config_uploads_data_source_to_s3(
445457
sagemaker_session, cpu_instance_type, mxnet_full_version
446458
):
@@ -469,6 +481,7 @@ def test_mxnet_airflow_config_uploads_data_source_to_s3(
469481
)
470482

471483

484+
@pytest.mark.canary_quick
472485
def test_sklearn_airflow_config_uploads_data_source_to_s3(
473486
sagemaker_session, cpu_instance_type, sklearn_full_version
474487
):
@@ -503,14 +516,13 @@ def test_sklearn_airflow_config_uploads_data_source_to_s3(
503516
)
504517

505518

519+
@pytest.mark.canary_quick
506520
def test_tf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
507521
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
508-
image_name = (
509-
registry(sagemaker_session.boto_session.region_name) + "/factorization-machines:1"
510-
)
511-
512522
tf = TensorFlow(
513-
image_name=image_name,
523+
image_name=get_image_uri(
524+
sagemaker_session.boto_session.region_name, "factorization-machines"
525+
),
514526
entry_point=SCRIPT,
515527
role=ROLE,
516528
train_instance_count=SINGLE_INSTANCE_COUNT,
@@ -535,6 +547,7 @@ def test_tf_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_inst
535547
)
536548

537549

550+
@pytest.mark.canary_quick
538551
def test_xgboost_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu_instance_type):
539552
with timeout(seconds=AIRFLOW_CONFIG_TIMEOUT_IN_SECONDS):
540553

@@ -559,6 +572,7 @@ def test_xgboost_airflow_config_uploads_data_source_to_s3(sagemaker_session, cpu
559572
)
560573

561574

575+
@pytest.mark.canary_quick
562576
def test_pytorch_airflow_config_uploads_data_source_to_s3_when_inputs_not_provided(
563577
sagemaker_session, cpu_instance_type
564578
):

tests/integ/test_byo_estimator.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
import pytest
2222

2323
import sagemaker
24-
from sagemaker.amazon.amazon_estimator import registry
24+
from sagemaker.amazon.amazon_estimator import get_image_uri
2525
from sagemaker.estimator import Estimator
2626
from sagemaker.utils import unique_name_from_base
2727
from tests.integ import DATA_DIR, TRAINING_DEFAULT_TIMEOUT_MINUTES
@@ -52,7 +52,7 @@ def test_byo_estimator(sagemaker_session, region, cpu_instance_type):
5252
Default predictor is updated with json serializer and deserializer.
5353
5454
"""
55-
image_name = registry(region) + "/factorization-machines:1"
55+
image_name = get_image_uri(region, "factorization-machines")
5656
training_data_path = os.path.join(DATA_DIR, "dummy_tensor")
5757
job_name = unique_name_from_base("byo")
5858

@@ -100,7 +100,7 @@ def test_byo_estimator(sagemaker_session, region, cpu_instance_type):
100100

101101

102102
def test_async_byo_estimator(sagemaker_session, region, cpu_instance_type):
103-
image_name = registry(region) + "/factorization-machines:1"
103+
image_name = get_image_uri(region, "factorization-machines")
104104
endpoint_name = unique_name_from_base("byo")
105105
training_data_path = os.path.join(DATA_DIR, "dummy_tensor")
106106
job_name = unique_name_from_base("byo")

tests/integ/test_tuner.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
from tests.integ import vpc_test_utils
3030

3131
from sagemaker import KMeans, LDA, RandomCutForest
32-
from sagemaker.amazon.amazon_estimator import registry
32+
from sagemaker.amazon.amazon_estimator import get_image_uri
3333
from sagemaker.amazon.common import read_records
3434
from sagemaker.chainer import Chainer
3535
from sagemaker.estimator import Estimator
@@ -891,7 +891,7 @@ def test_tuning_byo_estimator(sagemaker_session, cpu_instance_type):
891891
Later the trained model is deployed and prediction is called against the endpoint.
892892
Default predictor is updated with json serializer and deserializer.
893893
"""
894-
image_name = registry(sagemaker_session.boto_session.region_name) + "/factorization-machines:1"
894+
image_name = get_image_uri(sagemaker_session.boto_session.region_name, "factorization-machines")
895895
training_data_path = os.path.join(DATA_DIR, "dummy_tensor")
896896

897897
with timeout(minutes=TUNING_DEFAULT_TIMEOUT_MINUTES):

0 commit comments

Comments
 (0)