
Commit 1eae9d5

Merge branch 'master' into xgboost_1.2
2 parents: f02bdf9 + 6eaea0d

11 files changed: +74 additions, -14 deletions

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
@@ -1,5 +1,11 @@
 # Changelog
 
+## v2.10.0 (2020-09-23)
+
+### Features
+
+ * add inferentia pytorch inference container config
+
 ## v2.9.2 (2020-09-21)
 
 ### Bug Fixes and Other Changes

README.rst

Lines changed: 2 additions & 2 deletions
@@ -164,7 +164,7 @@ Setup a Python environment, and install the dependencies listed in ``doc/require
     # conda
     conda create -n sagemaker python=3.7
     conda activate sagemaker
-    conda install --file doc/requirements.txt
+    conda install sphinx=3.1.1 sphinx_rtd_theme=0.5.0
 
     # pip
     pip install -r doc/requirements.txt
@@ -201,7 +201,7 @@ In order to host a SparkML model in SageMaker, it should be serialized with ``ML
 
 For more information on MLeap, see https://github.com/combust/mleap .
 
-Supported major version of Spark: 2.2 (MLeap version - 0.9.6)
+Supported major version of Spark: 2.4 (MLeap version - 0.9.6)
 
 Here is an example on how to create an instance of ``SparkMLModel`` class and use ``deploy()`` method to create an
 endpoint which can be used to perform prediction against your trained SparkML Model.
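The example the README refers to is not included in this diff. As a minimal sketch of that usage with the new 2.4 default, assuming the MLeap-serialized model archive is already in S3 (the bucket path, role name, and instance type below are illustrative, not taken from this commit):

from sagemaker.sparkml.model import SparkMLModel

# Illustrative values only: replace the S3 path and IAM role with your own.
sparkml_model = SparkMLModel(
    model_data="s3://my-bucket/sparkml/model.tar.gz",  # hypothetical MLeap-serialized model archive
    role="SageMakerRole",
    spark_version="2.4",  # the new default, see src/sagemaker/sparkml/model.py below
)

# Create an endpoint backed by one instance and get a Predictor for it.
predictor = sparkml_model.deploy(initial_instance_count=1, instance_type="ml.c4.xlarge")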

VERSION

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-2.9.3.dev0
+2.10.1.dev0
src/sagemaker/image_uri_config/inferentia-pytorch.json

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+{
+    "processors": ["inf"],
+    "scope": ["inference"],
+    "versions": {
+        "1.5.1": {
+            "py_versions": ["py3"],
+            "registries": {
+                "us-east-1": "785573368785",
+                "us-west-2": "301217895009"
+            },
+            "repository": "sagemaker-neo-pytorch"
+        }
+    }
+}
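For orientation, this new table would typically be consumed through the SDK's image URI lookup. The framework key "inferentia-pytorch" and the inf1 instance type below are assumptions modeled on the existing inferentia MXNet/TensorFlow configs, not something this diff shows directly:

from sagemaker import image_uris

# Assumed lookup: the key name and parameters mirror the existing inferentia-* configs.
uri = image_uris.retrieve(
    framework="inferentia-pytorch",
    region="us-west-2",
    version="1.5.1",
    py_version="py3",
    instance_type="ml.inf1.xlarge",
)
# Resolves to an image in account 301217895009 (us-west-2) under the
# "sagemaker-neo-pytorch" repository defined in the config above.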

src/sagemaker/image_uri_config/sparkml-serving.json

Lines changed: 30 additions & 1 deletion
@@ -30,5 +30,34 @@
             },
             "repository": "sagemaker-sparkml-serving"
         }
-    }
+    },
+    "2.4": {
+        "registries": {
+            "af-south-1": "510948584623",
+            "ap-east-1": "651117190479",
+            "ap-northeast-1": "354813040037",
+            "ap-northeast-2": "366743142698",
+            "ap-south-1": "720646828776",
+            "ap-southeast-1": "121021644041",
+            "ap-southeast-2": "783357654285",
+            "ca-central-1": "341280168497",
+            "cn-north-1": "450853457545",
+            "cn-northwest-1": "451049120500",
+            "eu-central-1": "492215442770",
+            "eu-north-1": "662702820516",
+            "eu-west-1": "141502667606",
+            "eu-west-2": "764974769150",
+            "eu-west-3": "659782779980",
+            "eu-south-1": "978288397137",
+            "me-south-1": "801668240914",
+            "sa-east-1": "737474898029",
+            "us-east-1": "683313688378",
+            "us-east-2": "257758044811",
+            "us-gov-west-1": "414596584902",
+            "us-iso-east-1": "833128469047",
+            "us-west-1": "746614075791",
+            "us-west-2": "246618743249"
+        },
+        "repository": "sagemaker-sparkml-serving"
+    }
 }
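The unit test at the bottom of this commit exercises the new entry through image_uris.retrieve; a minimal sketch of that lookup (the region choice here is illustrative):

from sagemaker import image_uris

# Look up the SparkML serving container for the newly added 2.4 version.
uri = image_uris.retrieve("sparkml-serving", region="us-east-1", version="2.4")
# us-east-1 maps to account 683313688378 and the "sagemaker-sparkml-serving" repository above.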

src/sagemaker/sparkml/model.py

Lines changed: 2 additions & 2 deletions
@@ -59,7 +59,7 @@ class SparkMLModel(Model):
     model .
     """
 
-    def __init__(self, model_data, role=None, spark_version=2.2, sagemaker_session=None, **kwargs):
+    def __init__(self, model_data, role=None, spark_version=2.4, sagemaker_session=None, **kwargs):
         """Initialize a SparkMLModel.
 
         Args:
@@ -73,7 +73,7 @@ def __init__(self, model_data, role=None, spark_version=2.2, sagemaker_session=N
                 artifacts. After the endpoint is created, the inference code
                 might use the IAM role, if it needs to access an AWS resource.
             spark_version (str): Spark version you want to use for executing the
-                inference (default: '2.2').
+                inference (default: '2.4').
             sagemaker_session (sagemaker.session.Session): Session object which
                 manages interactions with Amazon SageMaker APIs and any other
                 AWS services needed. If not specified, the estimator creates one

tests/conftest.py

Lines changed: 1 addition & 0 deletions
@@ -44,6 +44,7 @@
     "coach_tensorflow",
     "inferentia_mxnet",
     "inferentia_tensorflow",
+    "inferentia_pytorch",
     "mxnet",
     "neo_mxnet",
     "neo_pytorch",

tests/integ/test_neo_mxnet.py

Lines changed: 6 additions & 0 deletions
@@ -58,6 +58,9 @@ def mxnet_training_job(
 
 
 @pytest.mark.canary_quick
+@pytest.mark.skip(
+    reason="This test is failing because the image uri and the training script format has changed."
+)
 def test_attach_deploy(
     mxnet_training_job, sagemaker_session, cpu_instance_type, cpu_instance_family
 ):
@@ -86,6 +89,9 @@ def test_attach_deploy(
     predictor.predict(data)
 
 
+@pytest.mark.skip(
+    reason="This test is failing because the image uri and the training script format has changed."
+)
 def test_deploy_model(
     mxnet_training_job,
     sagemaker_session,

tests/integ/test_sparkml_serving.py

Lines changed: 7 additions & 7 deletions
@@ -17,19 +17,18 @@
 
 import pytest
 
+from botocore.errorfactory import ClientError
+
 from sagemaker.sparkml.model import SparkMLModel
 from sagemaker.utils import sagemaker_timestamp
 from tests.integ import DATA_DIR
 from tests.integ.timeout import timeout_and_delete_endpoint_by_name
 
 
 @pytest.mark.canary_quick
-@pytest.mark.skip(
-    reason="This test has always failed, but the failure was masked by a bug. "
-    "This test should be fixed. Details in https://github.com/aws/sagemaker-python-sdk/pull/968"
-)
 def test_sparkml_model_deploy(sagemaker_session, cpu_instance_type):
-    # Uploads an MLeap serialized MLeap model to S3 and use that to deploy a SparkML model to perform inference
+    # Uploads an MLeap serialized MLeap model to S3 and use that to deploy
+    # a SparkML model to perform inference
     data_path = os.path.join(DATA_DIR, "sparkml_model")
     endpoint_name = "test-sparkml-deploy-{}".format(sagemaker_timestamp())
     model_data = sagemaker_session.upload_data(
@@ -59,7 +58,8 @@ def test_sparkml_model_deploy(sagemaker_session, cpu_instance_type):
         predictor = model.deploy(1, cpu_instance_type, endpoint_name=endpoint_name)
 
         valid_data = "1.0,C,38.0,71.5,1.0,female"
-        assert predictor.predict(valid_data) == "1.0,0.0,38.0,1.0,71.5,0.0,1.0"
+        assert predictor.predict(valid_data) == b"1.0,0.0,38.0,1.0,71.5,0.0,1.0"
 
         invalid_data = "1.0,28.0,C,38.0,71.5,1.0"
-        assert predictor.predict(invalid_data) is None
+        with pytest.raises(ClientError):
+            predictor.predict(invalid_data)

tests/unit/sagemaker/image_uris/test_neo.py

Lines changed: 4 additions & 0 deletions
@@ -118,6 +118,10 @@ def test_inferentia_tensorflow(inferentia_tensorflow_version):
     _test_inferentia_framework_uris("tensorflow", inferentia_tensorflow_version)
 
 
+def test_inferentia_pytorch(inferentia_pytorch_version):
+    _test_inferentia_framework_uris("pytorch", inferentia_pytorch_version)
+
+
 def _expected_framework_uri(framework, version, region="us-west-2", processor="cpu"):
     return expected_uris.framework_uri(
         "sagemaker-{}".format(framework),

tests/unit/test_sparkml_serving.py

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ def sagemaker_session():
 
 def test_sparkml_model(sagemaker_session):
     sparkml = SparkMLModel(sagemaker_session=sagemaker_session, model_data=MODEL_DATA, role=ROLE)
-    assert sparkml.image_uri == image_uris.retrieve("sparkml-serving", REGION, version="2.2")
+    assert sparkml.image_uri == image_uris.retrieve("sparkml-serving", REGION, version="2.4")
 
 
 def test_predictor_type(sagemaker_session):
