fix: prevent integration test's timeout functions from hiding failures #968

Merged (6 commits) on Aug 7, 2019
Changes from all commits
4 changes: 4 additions & 0 deletions tests/integ/test_inference_pipeline.py
@@ -94,6 +94,10 @@ def test_inference_pipeline_batch_transform(sagemaker_session):

@pytest.mark.canary_quick
@pytest.mark.regional_testing
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_inference_pipeline_model_deploy(sagemaker_session):
sparkml_data_path = os.path.join(DATA_DIR, "sparkml_model")
xgboost_data_path = os.path.join(DATA_DIR, "xgboost_model")
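The same four-line skip marker is added to each of the remaining test files below, so the pattern is only annotated here. The two adjacent string literals are joined by Python's implicit string concatenation into a single reason, and pytest prints that reason in its short test summary when run with -rs. A minimal, self-contained sketch of the behavior; the test name and body are placeholders, not code from this PR:

import pytest


@pytest.mark.skip(
    reason="This test has always failed, but the failure was masked by a bug. "
    "This test should be fixed."  # adjacent literals concatenate into one reason string
)
def test_placeholder():
    # Never executed; `pytest -rs` lists it as skipped together with the reason above.
    assert False
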
6 changes: 6 additions & 0 deletions tests/integ/test_ipinsights.py
@@ -14,6 +14,8 @@

import os

import pytest

from sagemaker import IPInsights, IPInsightsModel
from sagemaker.predictor import RealTimePredictor
from sagemaker.utils import unique_name_from_base
@@ -24,6 +26,10 @@
FEATURE_DIM = None


@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_ipinsights(sagemaker_session):
job_name = unique_name_from_base("ipinsights")

4 changes: 4 additions & 0 deletions tests/integ/test_marketplace.py
@@ -49,6 +49,10 @@


@pytest.mark.canary_quick
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_marketplace_estimator(sagemaker_session):
with timeout(minutes=15):
data_path = os.path.join(DATA_DIR, "marketplace", "training")
12 changes: 12 additions & 0 deletions tests/integ/test_mxnet_train.py
@@ -96,6 +96,10 @@ def test_deploy_model(mxnet_training_job, sagemaker_session, mxnet_full_version)
assert "Could not find model" in str(exception.value)


@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_deploy_model_with_tags_and_kms(mxnet_training_job, sagemaker_session, mxnet_full_version):
endpoint_name = "test-mxnet-deploy-model-{}".format(sagemaker_timestamp())

@@ -144,6 +148,10 @@ def test_deploy_model_with_tags_and_kms(mxnet_training_job, sagemaker_session, m
assert endpoint_config["KmsKeyId"] == kms_key_arn


@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_deploy_model_with_update_endpoint(
mxnet_training_job, sagemaker_session, mxnet_full_version
):
@@ -180,6 +188,10 @@ def test_deploy_model_with_update_endpoint(
assert new_production_variants["AcceleratorType"] is None


@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_deploy_model_with_update_non_existing_endpoint(
mxnet_training_job, sagemaker_session, mxnet_full_version
):
4 changes: 4 additions & 0 deletions tests/integ/test_ntm.py
@@ -26,6 +26,10 @@


@pytest.mark.canary_quick
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_ntm(sagemaker_session):
job_name = unique_name_from_base("ntm")

6 changes: 6 additions & 0 deletions tests/integ/test_object2vec.py
@@ -14,6 +14,8 @@

import os

import pytest

from sagemaker.predictor import RealTimePredictor
from sagemaker import Object2Vec, Object2VecModel
from sagemaker.utils import unique_name_from_base
@@ -24,6 +26,10 @@
FEATURE_NUM = None


@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_object2vec(sagemaker_session):
job_name = unique_name_from_base("object2vec")

12 changes: 12 additions & 0 deletions tests/integ/test_sklearn_train.py
@@ -94,6 +94,10 @@ def test_training_with_network_isolation(sagemaker_session, sklearn_full_version
@pytest.mark.canary_quick
@pytest.mark.regional_testing
@pytest.mark.skipif(PYTHON_VERSION != "py3", reason="Scikit-learn image supports only python 3.")
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_attach_deploy(sklearn_training_job, sagemaker_session):
endpoint_name = "test-sklearn-attach-deploy-{}".format(sagemaker_timestamp())

@@ -104,6 +108,10 @@ def test_attach_deploy(sklearn_training_job, sagemaker_session):


@pytest.mark.skipif(PYTHON_VERSION != "py3", reason="Scikit-learn image supports only python 3.")
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_deploy_model(sklearn_training_job, sagemaker_session):
endpoint_name = "test-sklearn-deploy-model-{}".format(sagemaker_timestamp())
with timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
@@ -123,6 +131,10 @@ def test_deploy_model(sklearn_training_job, sagemaker_session):


@pytest.mark.skipif(PYTHON_VERSION != "py3", reason="Scikit-learn image supports only python 3.")
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_async_fit(sagemaker_session):
endpoint_name = "test-sklearn-attach-deploy-{}".format(sagemaker_timestamp())

4 changes: 4 additions & 0 deletions tests/integ/test_sparkml_serving.py
@@ -25,6 +25,10 @@

@pytest.mark.canary_quick
@pytest.mark.regional_testing
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_sparkml_model_deploy(sagemaker_session):
# Uploads an MLeap serialized MLeap model to S3 and use that to deploy a SparkML model to perform inference
data_path = os.path.join(DATA_DIR, "sparkml_model")
4 changes: 4 additions & 0 deletions tests/integ/test_tf_script_mode.py
@@ -127,6 +127,10 @@ def test_mnist_distributed(sagemaker_session, instance_type):
)


@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_mnist_async(sagemaker_session):
estimator = TensorFlow(
entry_point=SCRIPT,
4 changes: 4 additions & 0 deletions tests/integ/test_tuner.py
@@ -803,6 +803,10 @@ def test_tuning_chainer(sagemaker_session):


@pytest.mark.canary_quick
@pytest.mark.skip(
reason="This test has always failed, but the failure was masked by a bug. "
"This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
)
def test_attach_tuning_pytorch(sagemaker_session):
mnist_dir = os.path.join(DATA_DIR, "pytorch_mnist")
mnist_script = os.path.join(mnist_dir, "mnist.py")
17 changes: 11 additions & 6 deletions tests/integ/timeout.py
@@ -46,7 +46,12 @@ def timeout(seconds=0, minutes=0, hours=0):
 
 @contextmanager
 def timeout_and_delete_endpoint_by_name(
-    endpoint_name, sagemaker_session, seconds=0, minutes=45, hours=0
+    endpoint_name,
+    sagemaker_session,
+    seconds=0,
+    minutes=45,
+    hours=0,
+    sleep_between_cleanup_attempts=10,
 ):
     limit = seconds + 60 * minutes + 3600 * hours
 
@@ -67,18 +72,18 @@
                     _show_logs(endpoint_name, "Endpoints", sagemaker_session)
                     if no_errors:
                         _cleanup_logs(endpoint_name, "Endpoints", sagemaker_session)
-                    return
+                    break
                 except ClientError as ce:
                     if ce.response["Error"]["Code"] == "ValidationException":
                         # avoids the inner exception to be overwritten
                         pass
                 # trying to delete the resource again in 10 seconds
-                sleep(10)
+                sleep(sleep_between_cleanup_attempts)
 
 
 @contextmanager
 def timeout_and_delete_model_with_transformer(
-    transformer, sagemaker_session, seconds=0, minutes=0, hours=0
+    transformer, sagemaker_session, seconds=0, minutes=0, hours=0, sleep_between_cleanup_attempts=10
 ):
     limit = seconds + 60 * minutes + 3600 * hours
 
@@ -99,11 +104,11 @@ def timeout_and_delete_model_with_transformer(
                     _show_logs(transformer.model_name, "Models", sagemaker_session)
                     if no_errors:
                         _cleanup_logs(transformer.model_name, "Models", sagemaker_session)
-                    return
+                    break
                 except ClientError as ce:
                     if ce.response["Error"]["Code"] == "ValidationException":
                         pass
-                sleep(10)
+                sleep(sleep_between_cleanup_attempts)
 
 
 def _show_logs(resource_name, resource_type, sagemaker_session):
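
The substantive fix is the change above in tests/integ/timeout.py: inside the cleanup retry loop that runs in the finally clause of each context manager, return becomes break. In Python, a return executed while a finally clause is handling a propagating exception discards that exception, so whatever the test body raised inside the with block vanished as soon as the endpoint or model was cleaned up and pytest reported a pass. A break that only exits the retry loop lets the finally clause run to completion, after which the original exception is re-raised. A standalone sketch of the difference (not code from the PR):

def cleanup_with_return():
    try:
        raise RuntimeError("test body failed")
    finally:
        for _ in range(3):
            # pretend cleanup succeeded on the first attempt
            return  # a return inside `finally` discards the in-flight RuntimeError


def cleanup_with_break():
    try:
        raise RuntimeError("test body failed")
    finally:
        for _ in range(3):
            # pretend cleanup succeeded on the first attempt
            break  # only exits the loop; the RuntimeError propagates afterwards


cleanup_with_return()  # returns normally, so the failure is hidden

try:
    cleanup_with_break()
except RuntimeError as err:
    print("failure is visible again:", err)

Running the sketch, the first call completes as if nothing went wrong, while the second surfaces the RuntimeError. That is exactly the behavior change this PR makes, and it is why the tests skipped above could "always fail" without anyone noticing.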
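
Beyond that behavioral fix, the only signature change is the new sleep_between_cleanup_attempts keyword, whose default of 10 matches the previously hard-coded sleep(10), so existing callers behave exactly as before. A hypothetical integration test overriding it might look like the sketch below; the example_model fixture, the endpoint name, and the assertion are illustrative assumptions, not code from this repository:

from sagemaker.utils import sagemaker_timestamp
from tests.integ.timeout import timeout_and_delete_endpoint_by_name


def test_example_deploy(sagemaker_session, example_model):
    # `example_model` is a hypothetical fixture providing a sagemaker Model object.
    endpoint_name = "test-example-deploy-{}".format(sagemaker_timestamp())

    # Failures raised inside this block now propagate after cleanup finishes,
    # and deletion retries wait 30 seconds instead of the default 10.
    with timeout_and_delete_endpoint_by_name(
        endpoint_name,
        sagemaker_session,
        minutes=20,
        sleep_between_cleanup_attempts=30,
    ):
        predictor = example_model.deploy(1, "ml.m4.xlarge", endpoint_name=endpoint_name)
        assert predictor.predict([1.0, 2.0, 3.0]) is not None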