Skip to content

Commit 5719166

Browse files
authored
Merge branch 'master' into change/remove-setuptools-deprecation
2 parents 98c2dc0 + 4befd93 commit 5719166

File tree

79 files changed

+6230
-224
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

79 files changed

+6230
-224
lines changed

CHANGELOG.md

Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,70 @@
11
# Changelog
22

3+
## v2.194.0 (2023-10-19)
4+
5+
### Features
6+
7+
* Added register step in Jumpstart model
8+
* jumpstart instance specific metric definitions
9+
10+
### Bug Fixes and Other Changes
11+
12+
* Updates for DJL 0.24.0 Release
13+
* use getter for resource-metadata dict
14+
* add method to Model class to check if repack is needed
15+
16+
## v2.193.0 (2023-10-18)
17+
18+
### Features
19+
20+
* jumpstart model artifact instance type variants
21+
* jumpstart instance specific hyperparameters
22+
* Feature Processor event based triggers (#1132)
23+
* Support job checkpoint in remote function
24+
* jumpstart model package arn instance type variants
25+
26+
### Bug Fixes and Other Changes
27+
28+
* Fix hyperlinks in feature_processor.scheduler parameter descriptions
29+
* add image_uris_unit_test pytest mark
30+
* bump apache-airflow to `v2.7.2`
31+
* clone distribution in validate_distribution
32+
* fix flaky Inference Recommender integration tests
33+
34+
### Documentation Changes
35+
36+
* Update PipelineModel.register documentation
37+
* specify that input_shape is no longer required for torch 2.0 mod…
38+
39+
## v2.192.1 (2023-10-13)
40+
41+
### Bug Fixes and Other Changes
42+
43+
* update local mode schema
44+
* import error in unsupported js regions
45+
* Update Ec2 instance type to g5.4xlarge in test_huggingface_torch_distributed.py
46+
47+
## v2.192.0 (2023-10-11)
48+
49+
### Features
50+
51+
* jumpstart estimator enable infra check flag
52+
* jumpstart default payloads
53+
* allow non-python files in job dependencies
54+
* allow configuring docker container in local mode
55+
56+
### Bug Fixes and Other Changes
57+
58+
* js tagging s3 prefix
59+
* Batch transform: Add support for split_type == "None" in local mode
60+
* use correct line endings and s3 uris on windows
61+
* Fixed bug in _create_training_details
62+
* DJL Neuronx 0.24.0
63+
64+
### Documentation Changes
65+
66+
* Include FeatureGroup's load_feature_definitions API documentation
67+
368
## v2.191.0 (2023-10-05)
469

570
### Features

VERSION

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
2.191.1.dev0
1+
2.194.1.dev0

doc/amazon_sagemaker_featurestore.rst

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -230,9 +230,11 @@ The following code from the fraud detection example shows a minimal
230230
    enable_online_store=True
231231
)
232232
233-
Creating a feature group takes time as the data is loaded. You will need
234-
to wait until it is created before you can use it. You can check status
235-
using the following method.
233+
Creating a feature group takes time as the data is loaded. You will
234+
need to wait until it is created before you can use it. You can
235+
check status using the following method. Note that it can take
236+
approximately 10-15 minutes to provision an online ``FeatureGroup``
237+
with the ``InMemory`` ``StorageType``.
236238

237239
.. code:: python
238240
@@ -480,7 +482,9 @@ Feature Store `DatasetBuilder API Reference
480482
.. rubric:: Delete a feature group
481483
:name: bCe9CA61b78
482484

483-
You can delete a feature group with the ``delete`` function.
485+
You can delete a feature group with the ``delete`` function. Note that it
486+
can take approximately 10-15 minutes to delete an online ``FeatureGroup``
487+
with the ``InMemory`` ``StorageType``.
484488

485489
.. code:: python
486490

requirements/extras/test_requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ awslogs==0.14.0
1212
black==22.3.0
1313
stopit==1.1.2
1414
# Update tox.ini to have correct version of airflow constraints file
15-
apache-airflow==2.7.1
15+
apache-airflow==2.7.2
1616
apache-airflow-providers-amazon==7.2.1
1717
attrs>=23.1.0,<24
1818
fabric==2.6.0

src/sagemaker/base_predictor.py

Lines changed: 39 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from __future__ import print_function, absolute_import
1515

1616
import abc
17-
from typing import Any, Tuple
17+
from typing import Any, Optional, Tuple, Union
1818

1919
from sagemaker.deprecations import (
2020
deprecated_class,
@@ -32,6 +32,9 @@
3232
StreamDeserializer,
3333
StringDeserializer,
3434
)
35+
from sagemaker.jumpstart.payload_utils import PayloadSerializer
36+
from sagemaker.jumpstart.types import JumpStartSerializablePayload
37+
from sagemaker.jumpstart.utils import get_jumpstart_content_bucket
3538
from sagemaker.model_monitor import (
3639
DataCaptureConfig,
3740
DefaultModelMonitor,
@@ -201,20 +204,44 @@ def _create_request_args(
201204
custom_attributes=None,
202205
):
203206
"""Placeholder docstring"""
207+
208+
jumpstart_serialized_data: Optional[Union[str, bytes]] = None
209+
jumpstart_accept: Optional[str] = None
210+
jumpstart_content_type: Optional[str] = None
211+
212+
if isinstance(data, JumpStartSerializablePayload):
213+
s3_client = self.sagemaker_session.s3_client
214+
region = self.sagemaker_session._region_name
215+
bucket = get_jumpstart_content_bucket(region)
216+
217+
jumpstart_serialized_data = PayloadSerializer(
218+
bucket=bucket, region=region, s3_client=s3_client
219+
).serialize(data)
220+
jumpstart_content_type = data.content_type
221+
jumpstart_accept = data.accept
222+
204223
args = dict(initial_args) if initial_args else {}
205224

206225
if "EndpointName" not in args:
207226
args["EndpointName"] = self.endpoint_name
208227

209228
if "ContentType" not in args:
210-
args["ContentType"] = (
211-
self.content_type
212-
if isinstance(self.content_type, str)
213-
else ", ".join(self.content_type)
214-
)
229+
if isinstance(data, JumpStartSerializablePayload) and jumpstart_content_type:
230+
args["ContentType"] = jumpstart_content_type
231+
else:
232+
args["ContentType"] = (
233+
self.content_type
234+
if isinstance(self.content_type, str)
235+
else ", ".join(self.content_type)
236+
)
215237

216238
if "Accept" not in args:
217-
args["Accept"] = self.accept if isinstance(self.accept, str) else ", ".join(self.accept)
239+
if isinstance(data, JumpStartSerializablePayload) and jumpstart_accept:
240+
args["Accept"] = jumpstart_accept
241+
else:
242+
args["Accept"] = (
243+
self.accept if isinstance(self.accept, str) else ", ".join(self.accept)
244+
)
218245

219246
if target_model:
220247
args["TargetModel"] = target_model
@@ -228,7 +255,11 @@ def _create_request_args(
228255
if custom_attributes:
229256
args["CustomAttributes"] = custom_attributes
230257

231-
data = self.serializer.serialize(data)
258+
data = (
259+
jumpstart_serialized_data
260+
if isinstance(data, JumpStartSerializablePayload) and jumpstart_serialized_data
261+
else self.serializer.serialize(data)
262+
)
232263

233264
args["Body"] = data
234265
return args

src/sagemaker/config/config_schema.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -106,6 +106,7 @@
106106
LOCAL_CODE = "local_code"
107107
SERVING_PORT = "serving_port"
108108
CONTAINER_CONFIG = "container_config"
109+
CONTAINER_ROOT = "container_root"
109110
REGION_NAME = "region_name"
110111

111112

@@ -1090,6 +1091,9 @@ def _simple_path(*args: str):
10901091
SERVING_PORT: {
10911092
TYPE: "integer",
10921093
},
1094+
CONTAINER_ROOT: {
1095+
TYPE: "string",
1096+
},
10931097
CONTAINER_CONFIG: {
10941098
TYPE: OBJECT,
10951099
},

src/sagemaker/djl_inference/model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -781,7 +781,7 @@ def serving_image_uri(self, region_name):
781781
str: The appropriate image URI based on the given parameters.
782782
"""
783783
if not self.djl_version:
784-
self.djl_version = "0.23.0"
784+
self.djl_version = "0.24.0"
785785

786786
return image_uris.retrieve(
787787
self._framework(),

src/sagemaker/estimator.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3082,6 +3082,7 @@ def __init__(
30823082
hyperparameters=hyperparameters,
30833083
instance_groups=instance_groups,
30843084
training_repository_access_mode=training_repository_access_mode,
3085+
enable_infra_check=enable_infra_check,
30853086
training_repository_credentials_provider_arn=training_repository_credentials_provider_arn, # noqa: E501 # pylint: disable=line-too-long
30863087
container_entry_point=container_entry_point,
30873088
container_arguments=container_arguments,

src/sagemaker/feature_store/feature_processor/__init__.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,8 +30,16 @@
3030
to_pipeline,
3131
schedule,
3232
describe,
33+
put_trigger,
34+
delete_trigger,
35+
enable_trigger,
36+
disable_trigger,
3337
delete_schedule,
3438
list_pipelines,
3539
execute,
3640
TransformationCode,
41+
FeatureProcessorPipelineEvents,
42+
)
43+
from sagemaker.feature_store.feature_processor._enums import ( # noqa: F401
44+
FeatureProcessorPipelineExecutionStatus,
3745
)

src/sagemaker/feature_store/feature_processor/_constants.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717

1818
DEFAULT_INSTANCE_TYPE = "ml.m5.xlarge"
1919
DEFAULT_SCHEDULE_STATE = "ENABLED"
20+
DEFAULT_TRIGGER_STATE = "ENABLED"
2021
UNDERSCORE = "_"
2122
RESOURCE_NOT_FOUND_EXCEPTION = "ResourceNotFoundException"
2223
RESOURCE_NOT_FOUND = "ResourceNotFound"
@@ -36,6 +37,8 @@
3637
FEATURE_PROCESSOR_TAG_KEY = "sm-fs-fe:created-from"
3738
FEATURE_PROCESSOR_TAG_VALUE = "fp-to-pipeline"
3839
FEATURE_GROUP_ARN_REGEX_PATTERN = r"arn:(.*?):sagemaker:(.*?):(.*?):feature-group/(.*?)$"
40+
PIPELINE_ARN_REGEX_PATTERN = r"arn:(.*?):sagemaker:(.*?):(.*?):pipeline/(.*?)$"
41+
EVENTBRIDGE_RULE_ARN_REGEX_PATTERN = r"arn:(.*?):events:(.*?):(.*?):rule/(.*?)$"
3942
SAGEMAKER_WHL_FILE_S3_PATH = "s3://ada-private-beta/sagemaker-2.151.1.dev0-py2.py3-none-any.whl"
4043
S3_DATA_DISTRIBUTION_TYPE = "FullyReplicated"
4144
PIPELINE_CONTEXT_NAME_TAG_KEY = "sm-fs-fe:feature-engineering-pipeline-context-name"
@@ -45,3 +48,7 @@
4548
PIPELINE_CONTEXT_NAME_TAG_KEY,
4649
PIPELINE_VERSION_CONTEXT_NAME_TAG_KEY,
4750
]
51+
BASE_EVENT_PATTERN = {
52+
"source": ["aws.sagemaker"],
53+
"detail": {"currentPipelineExecutionStatus": [], "pipelineArn": []},
54+
}

src/sagemaker/feature_store/feature_processor/_enums.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,3 +21,13 @@ class FeatureProcessorMode(Enum):
2121

2222
PYSPARK = "pyspark" # Execute a pyspark job.
2323
PYTHON = "python" # Execute a regular python script.
24+
25+
26+
class FeatureProcessorPipelineExecutionStatus(Enum):
27+
"""Enum of feature_processor pipeline execution status."""
28+
29+
EXECUTING = "Executing"
30+
STOPPING = "Stopping"
31+
STOPPED = "Stopped"
32+
FAILED = "Failed"
33+
SUCCEEDED = "Succeeded"

0 commit comments

Comments
 (0)