Skip to content

Commit 4528b39

Browse files
committed
trying to see if it works with slow tests
1 parent 31892be commit 4528b39

File tree

2 files changed

+34
-34
lines changed

2 files changed

+34
-34
lines changed

tests/integ/sagemaker/serve/constants.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,8 @@
2323
SERVE_SAGEMAKER_ENDPOINT_TIMEOUT = 15
2424
SERVE_SAVE_TIMEOUT = 2
2525

26-
NOT_RUNNING_ON_PY38 = platform.python_version_tuple()[1] != "8"
27-
NOT_RUNNING_ON_PY310 = platform.python_version_tuple()[1] != "10"
28-
NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE = os.getenv("TEST_OWNER") != "INF_EXP_DEV"
26+
PYTHON_VERSION_IS_38 = platform.python_version_tuple()[1] != "8"  # NOTE(review): name/logic inverted — this expression is True when NOT running on Python 3.8; the rename kept the old NOT_RUNNING_ON_PY38 condition. Should be == "8".
27+
PYTHON_VERSION_IS_310 = platform.python_version_tuple()[1] != "10"  # NOTE(review): name/logic inverted — True when NOT on Python 3.10. Fixing it to == "10" also requires negating both skipif() usages of this constant in test_serve_pt_happy.py, or the tests will be skipped exactly on the version they are meant to run on.
2928

3029
XGB_RESOURCE_DIR = os.path.join(DATA_DIR, "serve_resources", "xgboost")
3130
PYTORCH_SQUEEZENET_RESOURCE_DIR = os.path.join(DATA_DIR, "serve_resources", "pytorch")

tests/integ/sagemaker/serve/test_serve_pt_happy.py

Lines changed: 32 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
from tests.integ.sagemaker.serve.constants import (
2929
PYTORCH_SQUEEZENET_RESOURCE_DIR,
3030
SERVE_SAGEMAKER_ENDPOINT_TIMEOUT,
31-
NOT_RUNNING_ON_PY310,
31+
PYTHON_VERSION_IS_310,
3232
)
3333
from tests.integ.timeout import timeout
3434
from tests.integ.utils import cleanup_model_resources
@@ -148,44 +148,45 @@ def model_builder(request):
148148
return request.getfixturevalue(request.param)
149149

150150

151-
# @pytest.mark.skipif(
152-
# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or NOT_RUNNING_ON_PY310,
153-
# reason="The goal of these test are to test the serving components of our feature",
154-
# )
155-
# @pytest.mark.parametrize(
156-
# "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
157-
# )
158-
# def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image):
159-
# logger.info("Running in LOCAL_CONTAINER mode...")
160-
# caught_ex = None
151+
@pytest.mark.skipif(
152+
    PYTHON_VERSION_IS_310,  # NOTE(review): reads as "skip on 3.10" but actually skips on everything EXCEPT 3.10, because the constant in constants.py is defined with `!= "10"`. If the constant is corrected to `== "10"`, this must become `not PYTHON_VERSION_IS_310`.
153+
reason="The goal of these test are to test the serving components of our feature",
154+
)
155+
@pytest.mark.parametrize(
156+
"model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
157+
)
158+
def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image):
159+
logger.info("Running in LOCAL_CONTAINER mode...")
160+
caught_ex = None
161161

162-
# model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
162+
model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
163163

164-
# with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
165-
# try:
166-
# logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
167-
# predictor = model.deploy()
168-
# logger.info("Local container successfully deployed.")
169-
# predictor.predict(test_image)
170-
# except Exception as e:
171-
# logger.exception("test failed")
172-
# caught_ex = e
173-
# finally:
174-
# if model.modes[str(Mode.LOCAL_CONTAINER)].container:
175-
# model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
176-
# if caught_ex:
177-
# assert (
178-
# False
179-
# ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
164+
with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
165+
try:
166+
logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
167+
predictor = model.deploy()
168+
logger.info("Local container successfully deployed.")
169+
predictor.predict(test_image)
170+
except Exception as e:
171+
logger.exception("test failed")
172+
caught_ex = e
173+
finally:
174+
if model.modes[str(Mode.LOCAL_CONTAINER)].container:
175+
model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
176+
if caught_ex:
177+
assert (
178+
False
179+
), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
180180

181181

182182
@pytest.mark.skipif(
183-
    NOT_RUNNING_ON_PY310,  # or NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE,
183+
    PYTHON_VERSION_IS_310,  # or NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE,  # NOTE(review): misleadingly named — still the old "not running on 3.10" condition (constant defined with `!= "10"`); must be negated here if the constant's comparison is corrected.
184184
reason="The goal of these test are to test the serving components of our feature",
185185
)
186186
@pytest.mark.parametrize(
187187
"model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
188188
)
189+
@pytest.mark.slow_test
189190
def test_happy_pytorch_sagemaker_endpoint(
190191
sagemaker_session, model_builder, cpu_instance_type, test_image
191192
):
@@ -221,7 +222,7 @@ def test_happy_pytorch_sagemaker_endpoint(
221222

222223

223224
# @pytest.mark.skipif(
224-
# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or NOT_RUNNING_ON_PY310,
225+
# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or PYTHON_VERSION_IS_310,
225226
# reason="The goal of these test are to test the serving components of our feature",
226227
# )
227228
# @pytest.mark.parametrize(
@@ -267,7 +268,7 @@ def test_happy_pytorch_sagemaker_endpoint(
267268

268269

269270
# @pytest.mark.skipif(
270-
# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or NOT_RUNNING_ON_PY310,
271+
# NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or PYTHON_VERSION_IS_310,
271272
# reason="The goal of these test are to test the serving components of our feature",
272273
# )
273274
# @pytest.mark.parametrize(

0 commit comments

Comments
 (0)