Make inputs optional for hyperparameter tuning job. #490

Merged: 4 commits, Nov 16, 2018
CHANGELOG.rst (1 addition, 0 deletions)
@@ -11,6 +11,7 @@ CHANGELOG
 * doc-fix: Fix typos in tensorflow serving documentation
 * doc-fix: Add estimator base classes to API docs
 * feature: HyperparameterTuner: add support for Automatic Model Tuning's Warm Start Jobs
+* feature: HyperparameterTuner: Make input channels optional

 1.14.2
 ======

setup.py (1 addition, 1 deletion)
@@ -53,7 +53,7 @@ def read(fname):
     ],

     # Declare minimal set for installation
-    install_requires=['boto3>=1.9.38', 'numpy>=1.9.0', 'protobuf>=3.1', 'scipy>=0.19.0',
+    install_requires=['boto3>=1.9.45', 'numpy>=1.9.0', 'protobuf>=3.1', 'scipy>=0.19.0',
                       'urllib3 >=1.21', 'PyYAML>=3.2', 'protobuf3-to-dict>=0.1.5',
                       'docker-compose>=1.23.0'],

src/sagemaker/session.py (3 additions, 1 deletion)
@@ -348,13 +348,15 @@ def tune(self, job_name, strategy, objective_type, objective_metric_name,
                     'TrainingInputMode': input_mode,
                 },
                 'RoleArn': role,
-                'InputDataConfig': input_config,
                 'OutputDataConfig': output_config,
                 'ResourceConfig': resource_config,
                 'StoppingCondition': stop_condition,
             }
         }

+        if input_config is not None:
+            tune_request['TrainingJobDefinition']['InputDataConfig'] = input_config
+
         if warm_start_config:
             tune_request['WarmStartConfig'] = warm_start_config

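The conditional added here is the core of the change: when input_config is None, InputDataConfig is left out of the request entirely instead of being passed through as a null value. A minimal sketch of the same optional-key pattern, with a hypothetical helper name (build_definition is illustrative, not SDK code):

# Sketch of the optional-key pattern used in session.py above.
# build_definition() is a hypothetical helper, not part of the SDK.
def build_definition(role, output_config, resource_config, stop_condition,
                     input_config=None):
    definition = {
        'RoleArn': role,
        'OutputDataConfig': output_config,
        'ResourceConfig': resource_config,
        'StoppingCondition': stop_condition,
    }
    # Omit the key entirely when there are no input channels; passing a
    # literal None through would not form a valid request.
    if input_config is not None:
        definition['InputDataConfig'] = input_config
    return definition
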
src/sagemaker/tuner.py (1 addition, 1 deletion)
@@ -327,7 +327,7 @@ def _prepare_for_training(self, job_name=None, include_cls_metadata=True):
             self.estimator.__class__.__name__)
         self.static_hyperparameters[self.SAGEMAKER_ESTIMATOR_MODULE] = json.dumps(self.estimator.__module__)

-    def fit(self, inputs, job_name=None, include_cls_metadata=True, **kwargs):
+    def fit(self, inputs=None, job_name=None, include_cls_metadata=True, **kwargs):
         """Start a hyperparameter tuning job.

         Args:

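With inputs now defaulting to None, a tuner whose algorithm needs no input channels (for example, one that generates or downloads its own training data) can be started without any data arguments. A usage sketch, assuming estimator, objective_metric_name, and hyperparameter_ranges are defined as in the SDK's existing tuning examples:

from sagemaker.tuner import HyperparameterTuner

# `estimator`, `objective_metric_name`, and `hyperparameter_ranges` are
# assumed to be defined as usual; only the inputs-free fit() call is new.
tuner = HyperparameterTuner(estimator=estimator,
                            objective_metric_name=objective_metric_name,
                            hyperparameter_ranges=hyperparameter_ranges,
                            max_jobs=10,
                            max_parallel_jobs=2)

# Before this change, `inputs` was a required positional argument.
tuner.fit()
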
tests/unit/test_tuner.py (21 additions, 0 deletions)
@@ -15,6 +15,7 @@
 import copy
 import json

+import os
 import pytest
 from mock import Mock

@@ -26,6 +27,8 @@
     HyperparameterTuner, _TuningJob, WarmStartConfig, create_identical_dataset_and_algorithm_tuner, \
     create_transfer_learning_tuner, WarmStartTypes
 from sagemaker.mxnet import MXNet
+
+DATA_DIR = os.path.join(os.path.dirname(__file__), '..', 'data')
 MODEL_DATA = "s3://bucket/model.tar.gz"

 JOB_NAME = 'tuning_job'

@@ -488,6 +491,22 @@ def test_delete_endpoint(tuner):
     tuner.sagemaker_session.delete_endpoint.assert_called_with(JOB_NAME)


+def test_fit_no_inputs(tuner, sagemaker_session):
+    script_path = os.path.join(DATA_DIR, 'mxnet_mnist', 'failure_script.py')
+    tuner.estimator = MXNet(entry_point=script_path,
+                            role=ROLE,
+                            framework_version=FRAMEWORK_VERSION,
+                            train_instance_count=TRAIN_INSTANCE_COUNT,
+                            train_instance_type=TRAIN_INSTANCE_TYPE,
+                            sagemaker_session=sagemaker_session)
+
+    tuner.fit()
+
+    _, _, tune_kwargs = sagemaker_session.tune.mock_calls[0]
+
+    assert tune_kwargs['input_config'] is None
+
+
 def test_identical_dataset_and_algorithm_tuner(sagemaker_session):
     job_details = copy.deepcopy(TUNING_JOB_DETAILS)
     sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job = Mock(name='describe_tuning_job',

@@ -523,6 +542,8 @@ def test_transfer_learning_tuner(sagemaker_session):
     assert parent_tuner.warm_start_config.type == WarmStartTypes.TRANSFER_LEARNING
     assert parent_tuner.warm_start_config.parents == {tuner.latest_tuning_job.name, "p1", "p2"}
     assert parent_tuner.estimator == tuner.estimator
+
+
 #################################################################################
 # _ParameterRange Tests