 28 |  28 | from tests.integ.sagemaker.serve.constants import (
 29 |  29 |     PYTORCH_SQUEEZENET_RESOURCE_DIR,
 30 |  30 |     SERVE_SAGEMAKER_ENDPOINT_TIMEOUT,
 31 |     | -    # SERVE_LOCAL_CONTAINER_TIMEOUT,
    |  31 | +    SERVE_LOCAL_CONTAINER_TIMEOUT,
 32 |  32 |     PYTHON_VERSION_IS_NOT_310,
 33 |  33 | )
 34 |  34 | from tests.integ.timeout import timeout
@@ -149,36 +149,36 @@ def model_builder(request):
149 | 149 |     return request.getfixturevalue(request.param)
150 | 150 |
151 | 151 |
152 |     | -# @pytest.mark.skipif(
153 |     | -#     PYTHON_VERSION_IS_NOT_310,
154 |     | -#     reason="The goal of these test are to test the serving components of our feature",
155 |     | -# )
156 |     | -# @pytest.mark.parametrize(
157 |     | -#     "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
158 |     | -# )
159 |     | -# @pytest.mark.slow_test
160 |     | -# def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image):
161 |     | -#     logger.info("Running in LOCAL_CONTAINER mode...")
162 |     | -#     caught_ex = None
163 |     | -#
164 |     | -#     model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
165 |     | -#
166 |     | -#     with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
167 |     | -#         try:
168 |     | -#             logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
169 |     | -#             predictor = model.deploy()
170 |     | -#             logger.info("Local container successfully deployed.")
171 |     | -#             predictor.predict(test_image)
172 |     | -#         except Exception as e:
173 |     | -#             logger.exception("test failed")
174 |     | -#             caught_ex = e
175 |     | -#         finally:
176 |     | -#             if model.modes[str(Mode.LOCAL_CONTAINER)].container:
177 |     | -#                 model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
178 |     | -#     if caught_ex:
179 |     | -#         assert (
180 |     | -#             False
181 |     | -#         ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
    | 152 | +@pytest.mark.skipif(
    | 153 | +    PYTHON_VERSION_IS_NOT_310,
    | 154 | +    reason="The goal of these test are to test the serving components of our feature",
    | 155 | +)
    | 156 | +@pytest.mark.parametrize(
    | 157 | +    "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
    | 158 | +)
    | 159 | +@pytest.mark.slow_test
    | 160 | +def test_happy_pytorch_local_container(sagemaker_session, model_builder, test_image):
    | 161 | +    logger.info("Running in LOCAL_CONTAINER mode...")
    | 162 | +    caught_ex = None
    | 163 | +
    | 164 | +    model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
    | 165 | +
    | 166 | +    with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
    | 167 | +        try:
    | 168 | +            logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
    | 169 | +            predictor = model.deploy()
    | 170 | +            logger.info("Local container successfully deployed.")
    | 171 | +            predictor.predict(test_image)
    | 172 | +        except Exception as e:
    | 173 | +            logger.exception("test failed")
    | 174 | +            caught_ex = e
    | 175 | +        finally:
    | 176 | +            if model.modes[str(Mode.LOCAL_CONTAINER)].container:
    | 177 | +                model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
    | 178 | +    if caught_ex:
    | 179 | +        assert (
    | 180 | +            False
    | 181 | +        ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
182 | 182 |
183 | 183 |
184 | 184 | @pytest.mark.skipif(
@@ -223,85 +223,85 @@ def test_happy_pytorch_sagemaker_endpoint(
223 | 223 |     ), f"{caught_ex} was thrown when running pytorch squeezenet sagemaker endpoint test"
224 | 224 |
225 | 225 |
226 |     | -# @pytest.mark.skipif(
227 |     | -#     NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or PYTHON_VERSION_IS_NOT_310,
228 |     | -#     reason="The goal of these test are to test the serving components of our feature",
229 |     | -# )
230 |     | -# @pytest.mark.parametrize(
231 |     | -#     "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
232 |     | -# )
233 |     | -# def test_happy_pytorch_local_container_overwrite_to_sagemaker_endpoint(
234 |     | -#     sagemaker_session, model_builder, cpu_instance_type, test_image
235 |     | -# ):
236 |     | -#     logger.info("Building model in LOCAL_CONTAINER mode...")
237 |     | -#     caught_ex = None
238 |     | -
239 |     | -#     iam_client = sagemaker_session.boto_session.client("iam")
240 |     | -#     role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"]
241 |     | -#     logger.debug("Role arn: %s", role_arn)
242 |     | -
243 |     | -#     model = model_builder.build(
244 |     | -#         mode=Mode.LOCAL_CONTAINER, role_arn=role_arn, sagemaker_session=sagemaker_session
245 |     | -#     )
246 |     | -
247 |     | -#     with timeout(minutes=SERVE_SAGEMAKER_ENDPOINT_TIMEOUT):
248 |     | -#         try:
249 |     | -#             logger.info("Deploying and predicting in SAGEMAKER_ENDPOINT mode...")
250 |     | -#             predictor = model.deploy(
251 |     | -#                 instance_type=cpu_instance_type,
252 |     | -#                 initial_instance_count=1,
253 |     | -#                 mode=Mode.SAGEMAKER_ENDPOINT,
254 |     | -#             )
255 |     | -#             logger.info("Endpoint successfully deployed.")
256 |     | -#             predictor.predict(test_image)
257 |     | -#         except Exception as e:
258 |     | -#             caught_ex = e
259 |     | -#         finally:
260 |     | -#             cleanup_model_resources(
261 |     | -#                 sagemaker_session=model_builder.sagemaker_session,
262 |     | -#                 model_name=model.name,
263 |     | -#                 endpoint_name=model.endpoint_name,
264 |     | -#             )
265 |     | -#     if caught_ex:
266 |     | -#         logger.exception(caught_ex)
267 |     | -#         assert (
268 |     | -#             False
269 |     | -#         ), f"{caught_ex} was thrown when running pytorch squeezenet sagemaker endpoint test"
270 |     | -
271 |     | -
272 |     | -# @pytest.mark.skipif(
273 |     | -#     NOT_RUNNING_ON_INF_EXP_DEV_PIPELINE or PYTHON_VERSION_IS_NOT_310,
274 |     | -#     reason="The goal of these test are to test the serving components of our feature",
275 |     | -# )
276 |     | -# @pytest.mark.parametrize(
277 |     | -#     "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
278 |     | -# )
279 |     | -# def test_happy_pytorch_sagemaker_endpoint_overwrite_to_local_container(
280 |     | -#     sagemaker_session, model_builder, test_image
281 |     | -# ):
282 |     | -#     logger.info("Building model in SAGEMAKER_ENDPOINT mode...")
283 |     | -#     caught_ex = None
284 |     | -
285 |     | -#     iam_client = sagemaker_session.boto_session.client("iam")
286 |     | -#     role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"]
287 |     | -
288 |     | -#     model = model_builder.build(
289 |     | -#         mode=Mode.SAGEMAKER_ENDPOINT, role_arn=role_arn, sagemaker_session=sagemaker_session
290 |     | -#     )
291 |     | -
292 |     | -#     with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
293 |     | -#         try:
294 |     | -#             logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
295 |     | -#             predictor = model.deploy(mode=Mode.LOCAL_CONTAINER)
296 |     | -#             logger.info("Local container successfully deployed.")
297 |     | -#             predictor.predict(test_image)
298 |     | -#         except Exception as e:
299 |     | -#             logger.exception("test failed")
300 |     | -#             caught_ex = e
301 |     | -#         finally:
302 |     | -#             if model.modes[str(Mode.LOCAL_CONTAINER)].container:
303 |     | -#                 model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
304 |     | -#     if caught_ex:
305 |     | -#         assert (
306 |     | -#             False
307 |     | -#         ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
    | 226 | +@pytest.mark.skipif(
    | 227 | +    PYTHON_VERSION_IS_NOT_310,
    | 228 | +    reason="The goal of these test are to test the serving components of our feature",
    | 229 | +)
    | 230 | +@pytest.mark.parametrize(
    | 231 | +    "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
    | 232 | +)
    | 233 | +def test_happy_pytorch_local_container_overwrite_to_sagemaker_endpoint(
    | 234 | +    sagemaker_session, model_builder, cpu_instance_type, test_image
    | 235 | +):
    | 236 | +    logger.info("Building model in LOCAL_CONTAINER mode...")
    | 237 | +    caught_ex = None
    | 238 | +
    | 239 | +    iam_client = sagemaker_session.boto_session.client("iam")
    | 240 | +    role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"]
    | 241 | +    logger.debug("Role arn: %s", role_arn)
    | 242 | +
    | 243 | +    model = model_builder.build(
    | 244 | +        mode=Mode.LOCAL_CONTAINER, role_arn=role_arn, sagemaker_session=sagemaker_session
    | 245 | +    )
    | 246 | +
    | 247 | +    with timeout(minutes=SERVE_SAGEMAKER_ENDPOINT_TIMEOUT):
    | 248 | +        try:
    | 249 | +            logger.info("Deploying and predicting in SAGEMAKER_ENDPOINT mode...")
    | 250 | +            predictor = model.deploy(
    | 251 | +                instance_type=cpu_instance_type,
    | 252 | +                initial_instance_count=1,
    | 253 | +                mode=Mode.SAGEMAKER_ENDPOINT,
    | 254 | +            )
    | 255 | +            logger.info("Endpoint successfully deployed.")
    | 256 | +            predictor.predict(test_image)
    | 257 | +        except Exception as e:
    | 258 | +            caught_ex = e
    | 259 | +        finally:
    | 260 | +            cleanup_model_resources(
    | 261 | +                sagemaker_session=model_builder.sagemaker_session,
    | 262 | +                model_name=model.name,
    | 263 | +                endpoint_name=model.endpoint_name,
    | 264 | +            )
    | 265 | +    if caught_ex:
    | 266 | +        logger.exception(caught_ex)
    | 267 | +        assert (
    | 268 | +            False
    | 269 | +        ), f"{caught_ex} was thrown when running pytorch squeezenet sagemaker endpoint test"
    | 270 | +
    | 271 | +
    | 272 | +@pytest.mark.skipif(
    | 273 | +    PYTHON_VERSION_IS_NOT_310,
    | 274 | +    reason="The goal of these test are to test the serving components of our feature",
    | 275 | +)
    | 276 | +@pytest.mark.parametrize(
    | 277 | +    "model_builder", ["model_builder_inference_spec_schema_builder"], indirect=True
    | 278 | +)
    | 279 | +def test_happy_pytorch_sagemaker_endpoint_overwrite_to_local_container(
    | 280 | +    sagemaker_session, model_builder, test_image
    | 281 | +):
    | 282 | +    logger.info("Building model in SAGEMAKER_ENDPOINT mode...")
    | 283 | +    caught_ex = None
    | 284 | +
    | 285 | +    iam_client = sagemaker_session.boto_session.client("iam")
    | 286 | +    role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"]
    | 287 | +
    | 288 | +    model = model_builder.build(
    | 289 | +        mode=Mode.SAGEMAKER_ENDPOINT, role_arn=role_arn, sagemaker_session=sagemaker_session
    | 290 | +    )
    | 291 | +
    | 292 | +    with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
    | 293 | +        try:
    | 294 | +            logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
    | 295 | +            predictor = model.deploy(mode=Mode.LOCAL_CONTAINER)
    | 296 | +            logger.info("Local container successfully deployed.")
    | 297 | +            predictor.predict(test_image)
    | 298 | +        except Exception as e:
    | 299 | +            logger.exception("test failed")
    | 300 | +            caught_ex = e
    | 301 | +        finally:
    | 302 | +            if model.modes[str(Mode.LOCAL_CONTAINER)].container:
    | 303 | +                model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
    | 304 | +    if caught_ex:
    | 305 | +        assert (
    | 306 | +            False
    | 307 | +        ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"
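
Note: every re-enabled test requests the model_builder_inference_spec_schema_builder fixture, which is defined elsewhere in this file. For orientation, below is a minimal sketch of what such a builder typically looks like in the SDK's serve feature. The import paths may differ between SDK versions, and the SqueezeNetSpec class, the model.pt artifact name, and the build_local_then_deploy helper are illustrative assumptions, not code from this change.

import torch

# Assumed module paths; newer SDK versions may re-export these from sagemaker.serve.
from sagemaker.serve.builder.model_builder import ModelBuilder
from sagemaker.serve.builder.schema_builder import SchemaBuilder
from sagemaker.serve.mode.function_pointers import Mode
from sagemaker.serve.spec.inference_spec import InferenceSpec


class SqueezeNetSpec(InferenceSpec):
    """Hypothetical spec telling ModelBuilder how to load and invoke the model."""

    def load(self, model_dir: str):
        # "model.pt" is a placeholder artifact name for this sketch.
        model = torch.jit.load(f"{model_dir}/model.pt")
        model.eval()
        return model

    def invoke(self, input_object, model):
        with torch.no_grad():
            return model(input_object)


def build_local_then_deploy(model_dir, sample_input, sample_output):
    # SchemaBuilder derives request/response (de)serialization from example payloads.
    builder = ModelBuilder(
        mode=Mode.LOCAL_CONTAINER,
        model_path=model_dir,
        inference_spec=SqueezeNetSpec(),
        schema_builder=SchemaBuilder(sample_input, sample_output),
    )
    model = builder.build()
    # In LOCAL_CONTAINER mode, deploy() starts a local container, mirroring the
    # happy-path test above; passing mode=Mode.SAGEMAKER_ENDPOINT instead would
    # overwrite the mode at deploy time, as the overwrite tests exercise.
    return model.deploy()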