
Commit 31c3ce9

Merge branch 'master' into master
2 parents: a4adceb + 11df130

20 files changed (+237 lines, -46 lines)

CHANGELOG.md

Lines changed: 10 additions & 0 deletions
@@ -1,5 +1,15 @@
 # Changelog
 
+## v1.69.0 (2020-07-09)
+
+### Features
+
+* Add ModelClientConfig Fields for Batch Transform
+
+### Documentation Changes
+
+* add KFP Processing component
+
 ## v1.68.0 (2020-07-07)
 
 ### Features

VERSION

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-1.68.1.dev0
+1.69.1.dev0

doc/frameworks/mxnet/using_mxnet.rst

Lines changed: 4 additions & 3 deletions
@@ -321,13 +321,14 @@ If there are other packages you want to use with your script, you can include a
 Both ``requirements.txt`` and your training script should be put in the same folder.
 You must specify this folder in ``source_dir`` argument when creating an MXNet estimator.
 
-The function of installing packages using ``requirements.txt`` is supported for all MXNet versions during training.
+The function of installing packages using ``requirements.txt`` is supported for MXNet versions 1.3.0 and higher during training.
+
 When serving an MXNet model, support for this function varies with MXNet versions.
 For MXNet 1.6.0 or newer, ``requirements.txt`` must be under folder ``code``.
 The SageMaker MXNet Estimator automatically saves ``code`` in ``model.tar.gz`` after training (assuming you set up your script and ``requirements.txt`` correctly as stipulated in the previous paragraph).
 In the case of bringing your own trained model for deployment, you must save ``requirements.txt`` under folder ``code`` in ``model.tar.gz`` yourself or specify it through ``dependencies``.
-For MXNet 1.4.1, ``requirements.txt`` is not supported for inference.
-For MXNet 0.12.1-1.3.0, ``requirements.txt`` must be in ``source_dir``.
+For MXNet 0.12.1-1.2.1, 1.4.0-1.4.1, ``requirements.txt`` is not supported for inference.
+For MXNet 1.3.0, ``requirements.txt`` must be in ``source_dir``.
 
 A ``requirements.txt`` file is a text file that contains a list of items that are installed by using ``pip install``.
 You can also specify the version of an item to install.
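
For reference, a minimal sketch of the workflow described above: an MXNet estimator whose source_dir holds both the training script and a requirements.txt, so the listed packages are pip-installed in the training container before the script runs. The folder name, role ARN, and S3 paths below are placeholder assumptions, not part of this commit.

    from sagemaker.mxnet import MXNet

    # "my_mxnet_code/" is assumed to contain train.py and requirements.txt side by side
    estimator = MXNet(
        entry_point="train.py",
        source_dir="my_mxnet_code",
        role="arn:aws:iam::123456789012:role/SageMakerRole",  # placeholder role ARN
        framework_version="1.6.0",
        py_version="py3",
        train_instance_count=1,
        train_instance_type="ml.m5.xlarge",
    )
    estimator.fit("s3://my-bucket/mxnet-training-data")  # placeholder S3 input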

setup.py

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ def read_version():
 
 # Declare minimal set for installation
 required_packages = [
-    "boto3>=1.13.24",
+    "boto3>=1.14.12",
     "numpy>=1.9.0",
     "protobuf>=3.1",
     "scipy>=0.19.0",

src/sagemaker/analytics.py

Lines changed: 34 additions & 0 deletions
@@ -431,6 +431,8 @@ def __init__(
         metric_names=None,
         parameter_names=None,
         sagemaker_session=None,
+        input_artifact_names=None,
+        output_artifact_names=None,
     ):
         """Initialize a ``ExperimentAnalytics`` instance.
 
@@ -450,6 +452,11 @@ def __init__(
             sagemaker_session (sagemaker.session.Session): Session object which manages interactions
                 with Amazon SageMaker APIs and any other AWS services needed. If not specified,
                 one is created using the default AWS configuration chain.
+            input_artifact_names(dict optional):The input artifacts for the experiment. Examples of
+                input artifacts are datasets, algorithms, hyperparameters, source code, and instance
+                types.
+            output_artifact_names(dict optional): The output artifacts for the experiment. Examples
+                of output artifacts are metrics, snapshots, logs, and images.
         """
         sagemaker_session = sagemaker_session or Session()
         self._sage_client = sagemaker_session.sagemaker_client
@@ -463,6 +470,8 @@ def __init__(
         self._sort_order = sort_order
         self._metric_names = metric_names
         self._parameter_names = parameter_names
+        self._input_artifact_names = input_artifact_names
+        self._output_artifact_names = output_artifact_names
         self._trial_components = None
         super(ExperimentAnalytics, self).__init__()
         self.clear_cache()
@@ -516,6 +525,21 @@ def _reshape_metrics(self, metrics):
                 out["{} - {}".format(metric_name, stat_type)] = stat_value
         return out
 
+    def _reshape_artifacts(self, artifacts, _artifact_names):
+        """Reshape trial component input/output artifacts to a pandas column
+        Args:
+            artifacts: trial component input/output artifacts
+        Returns:
+            dict: Key: artifacts name, Value: artifacts value
+        """
+        out = OrderedDict()
+        for name, value in sorted(artifacts.items()):
+            if _artifact_names and (name not in _artifact_names):
+                continue
+            out["{} - {}".format(name, "MediaType")] = value.get("MediaType")
+            out["{} - {}".format(name, "Value")] = value.get("Value")
+        return out
+
     def _reshape(self, trial_component):
         """Reshape trial component data to pandas columns
         Args:
@@ -533,6 +557,16 @@ def _reshape(self, trial_component):
 
         out.update(self._reshape_parameters(trial_component.get("Parameters", [])))
         out.update(self._reshape_metrics(trial_component.get("Metrics", [])))
+        out.update(
+            self._reshape_artifacts(
+                trial_component.get("InputArtifacts", []), self._input_artifact_names
+            )
+        )
+        out.update(
+            self._reshape_artifacts(
+                trial_component.get("OutputArtifacts", []), self._output_artifact_names
+            )
+        )
         return out
 
     def _fetch_dataframe(self):
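
For reference, a minimal usage sketch of the two new constructor arguments; the experiment and artifact names below are illustrative (they mirror the integration test added in this commit), and only the named artifacts are reshaped into dataframe columns.

    from sagemaker.analytics import ExperimentAnalytics

    analytics = ExperimentAnalytics(
        experiment_name="my-experiment",             # placeholder experiment name
        input_artifact_names=["inputArtifacts1"],    # keep only these input artifact columns
        output_artifact_names=["outputArtifacts1"],  # keep only these output artifact columns
    )
    df = analytics.dataframe()
    # Artifact columns appear as "<name> - MediaType" and "<name> - Value".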

src/sagemaker/model_monitor/model_monitoring.py

Lines changed: 1 addition & 0 deletions
@@ -57,6 +57,7 @@
     "ca-central-1": "536280801234",
     "cn-north-1": "453000072557",
     "cn-northwest-1": "453252182341",
+    "us-gov-west-1": "362178532790",
 }
 
 STATISTICS_JSON_DEFAULT_FILE_NAME = "statistics.json"
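
With this entry, the pre-built model-monitor analyzer image can also be resolved in us-gov-west-1 (account 362178532790). A minimal sketch, assuming a sagemaker session configured for that region and a placeholder role ARN:

    from sagemaker.model_monitor import DefaultModelMonitor

    monitor = DefaultModelMonitor(
        role="arn:aws:iam::123456789012:role/SageMakerRole",  # placeholder role ARN
        instance_count=1,
        instance_type="ml.m5.xlarge",
        volume_size_in_gb=20,
        max_runtime_in_seconds=3600,
    )
    # The analyzer image URI is looked up from the region-to-account map above.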

src/sagemaker/session.py

Lines changed: 7 additions & 0 deletions
@@ -1996,6 +1996,7 @@ def transform(
         experiment_config,
         tags,
         data_processing,
+        model_client_config=None,
     ):
         """Create an Amazon SageMaker transform job.
 
@@ -2020,6 +2021,9 @@ def transform(
             data_processing(dict): A dictionary describing config for combining the input data and
                 transformed data. For more, see
                 https://docs.aws.amazon.com/sagemaker/latest/dg/API_Tag.html.
+            model_client_config (dict): A dictionary describing the model configuration for the
+                job. Dictionary contains two optional keys,
+                'InvocationsTimeoutInSeconds', and 'InvocationsMaxRetries'.
         """
         transform_request = {
             "TransformJobName": job_name,
@@ -2050,6 +2054,9 @@ def transform(
         if experiment_config and len(experiment_config) > 0:
             transform_request["ExperimentConfig"] = experiment_config
 
+        if model_client_config and len(model_client_config) > 0:
+            transform_request["ModelClientConfig"] = model_client_config
+
         LOGGER.info("Creating transform job with name: %s", job_name)
         LOGGER.debug("Transform request: %s", json.dumps(transform_request, indent=4))
         self.sagemaker_client.create_transform_job(**transform_request)
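
For reference, a sketch of the dictionary shape Session.transform now accepts; it is forwarded unchanged to CreateTransformJob as ModelClientConfig. The values below are illustrative and both keys are optional.

    model_client_config = {
        "InvocationsTimeoutInSeconds": 3600,  # per-invocation timeout for the model container
        "InvocationsMaxRetries": 1,           # retries after a failed invocation
    }
    # Passed as the model_client_config argument of Session.transform(), which adds it
    # to the CreateTransformJob request as "ModelClientConfig".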

src/sagemaker/tensorflow/defaults.py

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@
 LATEST_VERSION = "2.2.0"
 """The latest version of TensorFlow included in the SageMaker pre-built Docker images."""
 
-LATEST_SERVING_VERSION = "2.1.0"
+LATEST_SERVING_VERSION = "2.2.0"
 """The latest version of TensorFlow Serving included in the SageMaker pre-built Docker images."""
 
 LATEST_PY2_VERSION = "2.1.0"

src/sagemaker/transformer.py

Lines changed: 9 additions & 0 deletions
@@ -120,6 +120,7 @@ def transform(
         output_filter=None,
         join_source=None,
         experiment_config=None,
+        model_client_config=None,
         wait=False,
         logs=False,
     ):
@@ -172,6 +173,10 @@ def transform(
                 Dictionary contains three optional keys,
                 'ExperimentName', 'TrialName', and 'TrialComponentDisplayName'.
                 (default: ``None``).
+            model_client_config (dict[str, str]): Model configuration.
+                Dictionary contains two optional keys,
+                'InvocationsTimeoutInSeconds', and 'InvocationsMaxRetries'.
+                (default: ``None``).
             wait (bool): Whether the call should wait until the job completes
                 (default: False).
             logs (bool): Whether to show the logs produced by the job.
@@ -208,6 +213,7 @@ def transform(
             output_filter,
             join_source,
             experiment_config,
+            model_client_config,
         )
 
         if wait:
@@ -342,6 +348,7 @@ def start_new(
         output_filter,
         join_source,
         experiment_config,
+        model_client_config,
     ):
         """
         Args:
@@ -355,6 +362,7 @@ def start_new(
             output_filter:
             join_source:
             experiment_config:
+            model_client_config:
         """
         config = _TransformJob._load_config(
             data, data_type, content_type, compression_type, split_type, transformer
@@ -374,6 +382,7 @@ def start_new(
             output_config=config["output_config"],
             resource_config=config["resource_config"],
             experiment_config=experiment_config,
+            model_client_config=model_client_config,
             tags=transformer.tags,
             data_processing=data_processing,
         )
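
A minimal sketch of the same feature through the high-level API: the dict passed to Transformer.transform is relayed via _TransformJob.start_new to Session.transform. The model name and S3 paths below are placeholders.

    from sagemaker.transformer import Transformer

    transformer = Transformer(
        model_name="my-model",                      # assumes an existing SageMaker model
        instance_count=1,
        instance_type="ml.m5.xlarge",
        output_path="s3://my-bucket/batch-output",  # placeholder output location
    )
    transformer.transform(
        data="s3://my-bucket/batch-input",          # placeholder input prefix
        content_type="text/csv",
        model_client_config={
            "InvocationsTimeoutInSeconds": 600,
            "InvocationsMaxRetries": 2,
        },
    )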

tests/conftest.py

Lines changed: 1 addition & 8 deletions
@@ -28,7 +28,7 @@
 from sagemaker.rl import RLEstimator
 from sagemaker.sklearn.defaults import SKLEARN_VERSION
 from sagemaker.tensorflow import TensorFlow
-from sagemaker.tensorflow.defaults import LATEST_VERSION, LATEST_SERVING_VERSION
+from sagemaker.tensorflow.defaults import LATEST_VERSION
 
 DEFAULT_REGION = "us-west-2"
 CUSTOM_BUCKET_NAME_PREFIX = "sagemaker-custom-bucket"
@@ -336,10 +336,3 @@ def pytest_generate_tests(metafunc):
 @pytest.fixture(scope="module")
 def xgboost_full_version(request):
     return request.config.getoption("--xgboost-full-version")
-
-
-@pytest.fixture(scope="module")
-def tf_serving_version(tf_full_version):
-    if tf_full_version == LATEST_VERSION:
-        return LATEST_SERVING_VERSION
-    return tf_full_version
tests/integ/test_data_capture_config.py

Lines changed: 6 additions & 6 deletions
@@ -41,7 +41,7 @@
 
 
 def test_enabling_data_capture_on_endpoint_shows_correct_data_capture_status(
-    sagemaker_session, tf_serving_version
+    sagemaker_session, tf_full_version
 ):
     endpoint_name = unique_name_from_base("sagemaker-tensorflow-serving")
     model_data = sagemaker_session.upload_data(
@@ -52,7 +52,7 @@ def test_enabling_data_capture_on_endpoint_shows_correct_data_capture_status(
     model = Model(
         model_data=model_data,
         role=ROLE,
-        framework_version=tf_serving_version,
+        framework_version=tf_full_version,
         sagemaker_session=sagemaker_session,
     )
     predictor = model.deploy(
@@ -98,7 +98,7 @@ def test_enabling_data_capture_on_endpoint_shows_correct_data_capture_status(
 
 
 def test_disabling_data_capture_on_endpoint_shows_correct_data_capture_status(
-    sagemaker_session, tf_serving_version
+    sagemaker_session, tf_full_version
 ):
     endpoint_name = unique_name_from_base("sagemaker-tensorflow-serving")
     model_data = sagemaker_session.upload_data(
@@ -109,7 +109,7 @@ def test_disabling_data_capture_on_endpoint_shows_correct_data_capture_status(
     model = Model(
         model_data=model_data,
         role=ROLE,
-        framework_version=tf_serving_version,
+        framework_version=tf_full_version,
         sagemaker_session=sagemaker_session,
     )
     destination_s3_uri = os.path.join(
@@ -184,7 +184,7 @@ def test_disabling_data_capture_on_endpoint_shows_correct_data_capture_status(
 
 
 def test_updating_data_capture_on_endpoint_shows_correct_data_capture_status(
-    sagemaker_session, tf_serving_version
+    sagemaker_session, tf_full_version
 ):
     endpoint_name = sagemaker.utils.unique_name_from_base("sagemaker-tensorflow-serving")
     model_data = sagemaker_session.upload_data(
@@ -195,7 +195,7 @@ def test_updating_data_capture_on_endpoint_shows_correct_data_capture_status(
     model = Model(
         model_data=model_data,
         role=ROLE,
-        framework_version=tf_serving_version,
+        framework_version=tf_full_version,
         sagemaker_session=sagemaker_session,
     )
     destination_s3_uri = os.path.join(

tests/integ/test_experiments_analytics.py

Lines changed: 59 additions & 0 deletions
@@ -43,6 +43,65 @@ def experiment(sagemaker_session):
         _delete_resources(sm, experiment_name, trials)
 
 
+@contextmanager
+def experiment_with_artifacts(sagemaker_session):
+    sm = sagemaker_session.sagemaker_client
+    trials = {}  # for resource cleanup
+
+    experiment_name = "experiment-" + str(uuid.uuid4())
+    try:
+        sm.create_experiment(ExperimentName=experiment_name)
+
+        # Search returns 10 results by default. Add 20 trials to verify pagination.
+        for i in range(20):
+            trial_name = "trial-" + str(uuid.uuid4())
+            sm.create_trial(TrialName=trial_name, ExperimentName=experiment_name)
+
+            trial_component_name = "tc-" + str(uuid.uuid4())
+            trials[trial_name] = trial_component_name
+
+            sm.create_trial_component(
+                TrialComponentName=trial_component_name, DisplayName="Training"
+            )
+            sm.update_trial_component(
+                TrialComponentName=trial_component_name,
+                Parameters={"hp1": {"NumberValue": i}},
+                InputArtifacts={
+                    "inputArtifacts1": {"MediaType": "text/csv", "Value": "s3:/foo/bar1"}
+                },
+                OutputArtifacts={
+                    "outputArtifacts1": {"MediaType": "text/plain", "Value": "s3:/foo/bar2"}
+                },
+            )
+            sm.associate_trial_component(
+                TrialComponentName=trial_component_name, TrialName=trial_name
+            )
+
+        time.sleep(15)  # wait for search to get updated
+
+        yield experiment_name
+    finally:
+        _delete_resources(sm, experiment_name, trials)
+
+
+@pytest.mark.canary_quick
+def test_experiment_analytics_artifacts(sagemaker_session):
+    with experiment_with_artifacts(sagemaker_session) as experiment_name:
+        analytics = ExperimentAnalytics(
+            experiment_name=experiment_name, sagemaker_session=sagemaker_session
+        )
+
+        assert list(analytics.dataframe().columns) == [
+            "TrialComponentName",
+            "DisplayName",
+            "hp1",
+            "inputArtifacts1 - MediaType",
+            "inputArtifacts1 - Value",
+            "outputArtifacts1 - MediaType",
+            "outputArtifacts1 - Value",
+        ]
+
+
 @pytest.mark.canary_quick
 def test_experiment_analytics(sagemaker_session):
     with experiment(sagemaker_session) as experiment_name:

tests/integ/test_model_monitor.py

Lines changed: 2 additions & 2 deletions
@@ -88,7 +88,7 @@
 
 
 @pytest.fixture(scope="module")
-def predictor(sagemaker_session, tf_serving_version):
+def predictor(sagemaker_session, tf_full_version):
    endpoint_name = unique_name_from_base("sagemaker-tensorflow-serving")
    model_data = sagemaker_session.upload_data(
        path=os.path.join(tests.integ.DATA_DIR, "tensorflow-serving-test-model.tar.gz"),
@@ -100,7 +100,7 @@ def predictor(sagemaker_session, tf_serving_version):
     model = Model(
         model_data=model_data,
         role=ROLE,
-        framework_version=tf_serving_version,
+        framework_version=tf_full_version,
         sagemaker_session=sagemaker_session,
     )
     predictor = model.deploy(
