Commit d004f3c

Ensure Pt test runs
1 parent cb387bc commit d004f3c

2 files changed: 4 additions & 5 deletions

src/sagemaker/serve/utils/hardware_detector.py

Lines changed: 3 additions & 2 deletions
@@ -121,6 +121,7 @@ def _total_inference_model_size_mib(model: str, dtype: str) -> int:
     padding and converts to size MiB. When performing inference, expect
     to add up to an additional 20% to the given model size as found by EleutherAI.
     """
+    output = None
     try:
         from accelerate.commands.estimate import estimate_command_parser, gather_data
         args = estimate_command_parser().parse_args([model, "--dtypes", dtype])
@@ -129,8 +130,8 @@ def _total_inference_model_size_mib(model: str, dtype: str) -> int:
             args
         )  # "dtype", "Largest Layer", "Total Size Bytes", "Training using Adam"
     except ImportError:
-        logger.error("Install HuggingFace extras dependencies using pip install 'sagemaker["
-                     "huggingface]>=2.212.0'")
+        logger.error("To enable Model size calculations: Install HuggingFace extras dependencies "
+                     "using pip install 'sagemaker[huggingface]>=2.212.0'")

     if output is None:
         raise ValueError(f"Could not get Model size for {model}")
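
For context, a minimal sketch of how the guarded function reads after this change, pieced together from the two hunks above. The logging setup and the final bytes-to-MiB conversion are assumptions that sit outside the diff; the point of the added output = None is that a missing accelerate dependency now falls through to the explicit ValueError instead of referencing an unbound name.

import logging

logger = logging.getLogger(__name__)


def _total_inference_model_size_mib(model: str, dtype: str) -> int:
    """Estimate the total model size in MiB using HuggingFace accelerate.

    When performing inference, expect to add up to an additional 20%
    to the given model size as found by EleutherAI.
    """
    output = None  # added line: the name now exists even when the import below fails
    try:
        from accelerate.commands.estimate import estimate_command_parser, gather_data

        args = estimate_command_parser().parse_args([model, "--dtypes", dtype])
        output = gather_data(
            args
        )  # "dtype", "Largest Layer", "Total Size Bytes", "Training using Adam"
    except ImportError:
        logger.error(
            "To enable Model size calculations: Install HuggingFace extras dependencies "
            "using pip install 'sagemaker[huggingface]>=2.212.0'"
        )

    if output is None:
        raise ValueError(f"Could not get Model size for {model}")

    # Assumed tail (outside the diff): take the "Total Size Bytes" column and
    # convert to MiB; the real module applies its own inference padding here.
    total_size_bytes = output[0][2]
    return int(total_size_bytes / (1024 * 1024))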

tests/integ/sagemaker/serve/test_serve_pt_happy.py

Lines changed: 1 addition & 3 deletions
@@ -221,10 +221,8 @@ def test_happy_pytorch_sagemaker_endpoint(
         )
         if caught_ex:
             logger.exception(caught_ex)
-            ignore_if_worker_dies = "Worker died." in str(caught_ex)
-            # https://github.com/pytorch/serve/issues/3032
             assert (
-                ignore_if_worker_dies
+                False,
             ), f"{caught_ex} was thrown when running pytorch squeezenet sagemaker endpoint test"
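
As a standalone illustration of the test change (the helper name and the pytest import are not part of the commit): any exception caught while exercising the PyTorch squeezenet endpoint now fails the test outright, where previously exceptions whose message contained "Worker died." were tolerated.

import logging
from typing import Optional

import pytest

logger = logging.getLogger(__name__)


def _fail_on_caught_exception(caught_ex: Optional[BaseException]) -> None:
    """Illustrative helper mirroring the test's failure path after this commit."""
    if caught_ex:
        logger.exception(caught_ex)
        # Before this change, "Worker died." exceptions were ignored
        # (see pytorch/serve issue 3032); now any caught exception fails the test.
        pytest.fail(
            f"{caught_ex} was thrown when running pytorch squeezenet sagemaker endpoint test"
        )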