
change: fix attach for 1P algorithm estimators #931


Merged: 6 commits, Jul 16, 2019
Showing changes from 3 commits
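Context: "1P" refers to first-party Amazon algorithm estimators (the built-in algorithms, e.g. PCA). Before this fix, HyperparameterTuner.attach() could fail for these estimators because hyperparameters that were being tuned appear only in the tuning job's parameter ranges, not in its static hyperparameters, yet some of them are required by the estimator's constructor. A minimal usage sketch of the fixed call path; the tuning-job name is hypothetical:

from sagemaker.tuner import HyperparameterTuner

# "my-pca-tuning-job" is a hypothetical name. attach() infers the estimator
# class from the training image and rebuilds it from the job description,
# now including hyperparameters taken from the parameter ranges.
tuner = HyperparameterTuner.attach("my-pca-tuning-job")
print(tuner.estimator.hyperparameters())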
34 changes: 29 additions & 5 deletions src/sagemaker/tuner.py
@@ -19,7 +19,7 @@
from enum import Enum

import sagemaker
- from sagemaker.amazon.amazon_estimator import RecordSet
+ from sagemaker.amazon.amazon_estimator import RecordSet, AmazonAlgorithmEstimatorBase
from sagemaker.amazon.hyperparameter import Hyperparameter as hp # noqa
from sagemaker.analytics import HyperparameterTuningJobAnalytics
from sagemaker.estimator import Framework
@@ -358,7 +358,7 @@ def attach(cls, tuning_job_name, sagemaker_session=None, job_details=None, estim
estimator_cls, job_details["TrainingJobDefinition"]
)
estimator = cls._prepare_estimator_from_job_description(
- estimator_cls, job_details["TrainingJobDefinition"], sagemaker_session
+ estimator_cls, job_details, sagemaker_session
)
init_params = cls._prepare_init_params_from_job_description(job_details)
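The attach() path now forwards the full job description rather than only its TrainingJobDefinition, because the parameter ranges consumed below live under HyperParameterTuningJobConfig. An illustrative, abridged sketch of job_details (the values are made up; the keys shown are the ones this diff reads):

job_details = {
    "TrainingJobDefinition": {
        "StaticHyperParameters": {
            "feature_dim": "784",  # illustrative value
            "_tuning_objective_metric": "test:throughput",  # reserved key, deleted in the next hunk
        },
        "OutputDataConfig": {"S3OutputPath": "s3://bucket/prefix"},
    },
    "HyperParameterTuningJobConfig": {
        "ParameterRanges": {
            "IntegerParameterRanges": [
                {"Name": "num_components", "MinValue": "10", "MaxValue": "100", "ScalingType": "Auto"}
            ],
            "ContinuousParameterRanges": [],
            "CategoricalParameterRanges": [],
        },
    },
}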

@@ -497,16 +497,25 @@ def _prepare_estimator_cls(cls, estimator_cls, training_details):
)

@classmethod
- def _prepare_estimator_from_job_description(
-     cls, estimator_cls, training_details, sagemaker_session
- ):
+ def _prepare_estimator_from_job_description(cls, estimator_cls, job_details, sagemaker_session):
+     training_details = job_details["TrainingJobDefinition"]

# Swap name for static hyperparameters to what an estimator would expect
training_details["HyperParameters"] = training_details["StaticHyperParameters"]
del training_details["StaticHyperParameters"]

# Remove hyperparameter reserved by SageMaker for tuning jobs
del training_details["HyperParameters"]["_tuning_objective_metric"]

+ # Add missing hyperparameters defined in the hyperparameter ranges,
+ # as potentially required in the Amazon algorithm estimator's constructor
+ if issubclass(estimator_cls, AmazonAlgorithmEstimatorBase):
+     parameter_ranges = job_details["HyperParameterTuningJobConfig"]["ParameterRanges"]
+     additional_hyperparameters = cls._extract_hyperparmeters_from_parameter_ranges(
+         parameter_ranges
+     )
+     training_details["HyperParameters"].update(additional_hyperparameters)

# Add items expected by the estimator (but aren't needed otherwise)
training_details["TrainingJobName"] = ""
if "KmsKeyId" not in training_details["OutputDataConfig"]:
@@ -559,6 +568,21 @@ def _prepare_parameter_ranges(cls, parameter_ranges):

return ranges

+ @classmethod
+ def _extract_hyperparmeters_from_parameter_ranges(cls, parameter_ranges):
Review comment (Contributor): s/hyperparmeters/hyperparameters

+     hyperparameters = {}
+
+     for parameter in parameter_ranges["CategoricalParameterRanges"]:
+         hyperparameters[parameter["Name"]] = parameter["Values"][0]
+
+     for parameter in parameter_ranges["ContinuousParameterRanges"]:
+         hyperparameters[parameter["Name"]] = float(parameter["MinValue"])
+
+     for parameter in parameter_ranges["IntegerParameterRanges"]:
+         hyperparameters[parameter["Name"]] = int(parameter["MinValue"])
+
+     return hyperparameters
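A sketch of how the new helper behaves, using hypothetical ranges (the hyperparameter names are illustrative): for each tuned hyperparameter it picks one arbitrary but valid value, the first category or the range minimum, so the rebuilt estimator's constructor has every value it needs.

ranges = {
    "CategoricalParameterRanges": [
        {"Name": "predictor_type", "Values": ["binary_classifier", "regressor"]}
    ],
    "ContinuousParameterRanges": [
        {"Name": "learning_rate", "MinValue": "0.01", "MaxValue": "0.2"}
    ],
    "IntegerParameterRanges": [
        {"Name": "num_components", "MinValue": "10", "MaxValue": "100"}
    ],
}
# _extract_hyperparmeters_from_parameter_ranges(ranges) would return
# (the method name keeps the typo flagged in the review comment above):
# {"predictor_type": "binary_classifier", "learning_rate": 0.01, "num_components": 10}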

def hyperparameter_ranges(self):
"""Return the hyperparameter ranges in a dictionary to be used as part of a request for creating a
hyperparameter tuning job.
4 changes: 2 additions & 2 deletions tests/unit/test_tuner.py
@@ -78,7 +78,7 @@
"IntegerParameterRanges": [
{
"MaxValue": "100",
"Name": "mini_batch_size",
"Name": "num_components",
"MinValue": "10",
"ScalingType": "Auto",
}
@@ -416,7 +416,7 @@ def test_attach_tuning_job_with_estimator_from_hyperparameters(sagemaker_session
assert tuner.estimator.output_kms_key == ""

assert "_tuning_objective_metric" not in tuner.estimator.hyperparameters()
- assert tuner.estimator.hyperparameters()["num_components"] == "1"
+ assert tuner.estimator.hyperparameters()["num_components"] == "10"
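Note on the expectation change: because the new code updates the static hyperparameters with values extracted from the parameter ranges, num_components now comes from the fixture's IntegerParameterRanges entry (MinValue "10") rather than its previous static value of "1"; hyperparameters() reports the value back as a string, hence the comparison against "10". Renaming the fixture's tuned parameter from mini_batch_size to num_components likewise makes the test exercise a hyperparameter that the 1P estimator's constructor requires.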


def test_attach_tuning_job_with_estimator_from_hyperparameters_with_early_stopping(