Skip to content

Commit 65f1f15

Browse files
authored
fix region error in test (#534)
1 parent cecea12 commit 65f1f15

File tree

1 file changed

+13
-11
lines changed

1 file changed

+13
-11
lines changed

tests/integ/test_inference_pipeline.py

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -16,30 +16,30 @@
1616
import os
1717

1818
import pytest
19+
from tests.integ import DATA_DIR
20+
from tests.integ.timeout import timeout_and_delete_endpoint_by_name
1921

2022
from sagemaker.amazon.amazon_estimator import get_image_uri
2123
from sagemaker.content_types import CONTENT_TYPE_CSV
2224
from sagemaker.model import Model
2325
from sagemaker.pipeline import PipelineModel
2426
from sagemaker.predictor import RealTimePredictor, json_serializer
25-
from sagemaker.session import Session
2627
from sagemaker.sparkml.model import SparkMLModel
2728
from sagemaker.utils import sagemaker_timestamp
28-
from tests.integ import DATA_DIR
29-
from tests.integ.timeout import timeout_and_delete_endpoint_by_name
3029

3130

3231
@pytest.mark.continuous_testing
3332
@pytest.mark.regional_testing
3433
def test_inference_pipeline_model_deploy(sagemaker_session):
35-
# Creates a Pipeline model comprising of SparkML (serialized by MLeap) and XGBoost and deploys to one endpoint
3634
sparkml_data_path = os.path.join(DATA_DIR, 'sparkml_model')
3735
xgboost_data_path = os.path.join(DATA_DIR, 'xgboost_model')
3836
endpoint_name = 'test-inference-pipeline-deploy-{}'.format(sagemaker_timestamp())
39-
sparkml_model_data = sagemaker_session.upload_data(path=os.path.join(sparkml_data_path, 'mleap_model.tar.gz'),
40-
key_prefix='integ-test-data/sparkml/model')
41-
xgb_model_data = sagemaker_session.upload_data(path=os.path.join(xgboost_data_path, 'xgb_model.tar.gz'),
42-
key_prefix='integ-test-data/xgboost/model')
37+
sparkml_model_data = sagemaker_session.upload_data(
38+
path=os.path.join(sparkml_data_path, 'mleap_model.tar.gz'),
39+
key_prefix='integ-test-data/sparkml/model')
40+
xgb_model_data = sagemaker_session.upload_data(
41+
path=os.path.join(xgboost_data_path, 'xgb_model.tar.gz'),
42+
key_prefix='integ-test-data/xgboost/model')
4343
schema = json.dumps({
4444
"input": [
4545
{
@@ -74,10 +74,12 @@ def test_inference_pipeline_model_deploy(sagemaker_session):
7474
}
7575
})
7676
with timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
77-
sparkml_model = SparkMLModel(model_data=sparkml_model_data, env={'SAGEMAKER_SPARKML_SCHEMA': schema},
77+
sparkml_model = SparkMLModel(model_data=sparkml_model_data,
78+
env={'SAGEMAKER_SPARKML_SCHEMA': schema},
7879
sagemaker_session=sagemaker_session)
79-
xgb_image = get_image_uri(Session().boto_region_name, 'xgboost')
80-
xgb_model = Model(model_data=xgb_model_data, image=xgb_image, sagemaker_session=sagemaker_session)
80+
xgb_image = get_image_uri(sagemaker_session.boto_region_name, 'xgboost')
81+
xgb_model = Model(model_data=xgb_model_data, image=xgb_image,
82+
sagemaker_session=sagemaker_session)
8183
model = PipelineModel(models=[sparkml_model, xgb_model], role='SageMakerRole',
8284
sagemaker_session=sagemaker_session, name=endpoint_name)
8385
model.deploy(1, 'ml.m4.xlarge', endpoint_name=endpoint_name)

0 commit comments

Comments (0)