Skip to content

Commit 425bf38

Browse files
committed
doc: modified docstrings to comply with PEP257 standards
1 parent 275f6d3 commit 425bf38

File tree

5 files changed

+138
-149
lines changed

5 files changed

+138
-149
lines changed

src/sagemaker/amazon/amazon_estimator.py

Lines changed: 14 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -32,8 +32,9 @@
3232

3333

3434
class AmazonAlgorithmEstimatorBase(EstimatorBase):
35-
"""Base class for Amazon first-party Estimator implementations. This class
36-
isn't intended to be instantiated directly.
35+
"""Base class for Amazon first-party Estimator implementations.
36+
37+
This class isn't intended to be instantiated directly.
3738
"""
3839

3940
feature_dim = hp("feature_dim", validation.gt(0), data_type=int)
@@ -110,10 +111,7 @@ def data_location(self):
110111

111112
@data_location.setter
112113
def data_location(self, data_location):
113-
"""
114-
Args:
115-
data_location:
116-
"""
114+
"""Placeholder docstring"""
117115
if not data_location.startswith("s3://"):
118116
raise ValueError(
119117
'Expecting an S3 URL beginning with "s3://". Got "{}"'.format(data_location)
@@ -124,8 +122,7 @@ def data_location(self, data_location):
124122

125123
@classmethod
126124
def _prepare_init_params_from_job_description(cls, job_details, model_channel_name=None):
127-
"""Convert the job description to init params that can be handled by the
128-
class constructor
125+
"""Convert the job description to init params that can be handled by the class constructor.
129126
130127
Args:
131128
job_details: the returned job details from a describe_training_job
@@ -245,8 +242,7 @@ def fit(
245242
self.latest_training_job.wait(logs=logs)
246243

247244
def record_set(self, train, labels=None, channel="train", encrypt=False):
248-
"""Build a :class:`~RecordSet` from a numpy :class:`~ndarray` matrix and
249-
label vector.
245+
"""Build a :class:`~RecordSet` from a numpy :class:`~ndarray` matrix and label vector.
250246
251247
For the 2D ``ndarray`` ``train``, each row is converted to a
252248
:class:`~Record` object. The vector is stored in the "values" entry of
@@ -301,8 +297,7 @@ class RecordSet(object):
301297
def __init__(
302298
self, s3_data, num_records, feature_dim, s3_data_type="ManifestFile", channel="train"
303299
):
304-
"""A collection of Amazon :class:~`Record` objects serialized and stored
305-
in S3.
300+
"""A collection of Amazon :class:~`Record` objects serialized and stored in S3.
306301
307302
Args:
308303
s3_data (str): The S3 location of the training data
@@ -328,9 +323,8 @@ def __repr__(self):
328323
return str((RecordSet, self.__dict__))
329324

330325
def data_channel(self):
331-
"""Return a dictionary to represent the training data in a channel for
332-
use with ``fit()``
333-
"""
326+
"""Returns dictionary to represent the training data in a channel to use with ``fit()``."""
327+
334328
return {self.channel: self.records_s3_input()}
335329

336330
def records_s3_input(self):
@@ -341,9 +335,7 @@ def records_s3_input(self):
341335

342336

343337
class FileSystemRecordSet(object):
344-
"""Amazon SageMaker channel configuration for a file system data source
345-
for Amazon algorithms.
346-
"""
338+
"""Amazon SageMaker channel configuration for file system data source for Amazon algorithms."""
347339

348340
def __init__(
349341
self,
@@ -390,11 +382,7 @@ def data_channel(self):
390382

391383

392384
def _build_shards(num_shards, array):
393-
"""
394-
Args:
395-
num_shards:
396-
array:
397-
"""
385+
"""Placeholder docstring"""
398386
if num_shards < 1:
399387
raise ValueError("num_shards must be >= 1")
400388
shard_size = int(array.shape[0] / num_shards)
@@ -408,8 +396,9 @@ def _build_shards(num_shards, array):
408396
def upload_numpy_to_s3_shards(
409397
num_shards, s3, bucket, key_prefix, array, labels=None, encrypt=False
410398
):
411-
"""Upload the training ``array`` and ``labels`` arrays to ``num_shards`` S3
412-
objects, stored in "s3:// ``bucket`` / ``key_prefix`` /". Optionally
399+
"""Upload the training ``array`` and ``labels`` arrays to ``num_shards`` S3 objects.
400

401
The objects are stored in "s3:// ``bucket`` / ``key_prefix`` /". Optionally
413402
``encrypt`` the S3 objects using AES-256.
414403
415404
Args:

src/sagemaker/analytics.py

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
1111
# ANY KIND, either express or implied. See the License for the specific
1212
# language governing permissions and limitations under the License.
13-
"""Placeholder docstring."""
13+
"""Placeholder docstring"""
1414
from __future__ import print_function, absolute_import
1515

1616
from abc import ABCMeta, abstractmethod
@@ -38,6 +38,7 @@
3838

3939
class AnalyticsMetricsBase(with_metaclass(ABCMeta, object)):
4040
"""Base class for tuning job or training job analytics classes.
41+
4142
Understands common functionality like persistence and caching.
4243
"""
4344

@@ -54,8 +55,9 @@ def export_csv(self, filename):
5455

5556
def dataframe(self, force_refresh=False):
5657
"""A pandas dataframe with lots of interesting results about this object.
57-
Created by calling SageMaker List and Describe APIs and
58-
converting them into a convenient tabular summary.
58+
59+
Created by calling SageMaker List and Describe APIs and converting them into a
60+
convenient tabular summary.
5961
6062
Args:
6163
force_refresh (bool): Set to True to fetch the latest data from
@@ -73,8 +75,8 @@ def _fetch_dataframe(self):
7375

7476
def clear_cache(self):
7577
"""Clear the object of all local caches of API methods.
76-
So that the next time any properties are accessed they will be refreshed from the
77-
service.
78+
79+
So that the next time any properties are accessed they will be refreshed from the service.
7880
"""
7981
self._dataframe = None
8082

@@ -103,7 +105,7 @@ def __init__(self, hyperparameter_tuning_job_name, sagemaker_session=None):
103105

104106
@property
105107
def name(self):
106-
"""Name of the HyperparameterTuningJob being analyzed."""
108+
"""Name of the HyperparameterTuningJob being analyzed"""
107109
return self._tuning_job_name
108110

109111
def __repr__(self):
@@ -156,6 +158,7 @@ def reshape(training_summary):
156158
@property
157159
def tuning_ranges(self):
158160
"""A dictionary describing the ranges of all tuned hyperparameters.
161+
159162
The keys are the names of the hyperparameter, and the values are the ranges.
160163
161164
The output can take one of two forms:
@@ -207,7 +210,7 @@ def tuning_ranges(self):
207210
}
208211

209212
def _prepare_parameter_ranges(self, parameter_ranges):
210-
"""Convert parameter ranges to a dictionary using the parameter range names as the keys."""
213+
"""Convert parameter ranges to a dictionary using the parameter range names as the keys"""
211214
out = {}
212215
for _, ranges in parameter_ranges.items():
213216
for param in ranges:
@@ -313,7 +316,7 @@ def __init__(
313316

314317
@property
315318
def name(self):
316-
"""Name of the TrainingJob being analyzed."""
319+
"""Name of the TrainingJob being analyzed"""
317320
return self._training_job_name
318321

319322
def __repr__(self):
@@ -360,7 +363,7 @@ def _fetch_dataframe(self):
360363
return pd.DataFrame(self._data)
361364

362365
def _fetch_metric(self, metric_name):
363-
"""Fetch all the values of a named metric, and add them to _data.
366+
"""Fetch all the values of a named metric, and add them to _data
364367
365368
Args:
366369
metric_name: The metric name to fetch.
@@ -564,6 +567,7 @@ def clear_cache(self):
564567

565568
def _reshape_parameters(self, parameters):
566569
"""Reshape trial component parameters to a pandas column.
570+
567571
Args:
568572
parameters: trial component parameters
569573
Returns:
@@ -578,6 +582,7 @@ def _reshape_parameters(self, parameters):
578582

579583
def _reshape_metrics(self, metrics):
580584
"""Reshape trial component metrics to a pandas column.
585+
581586
Args:
582587
metrics: trial component metrics
583588
Returns:
@@ -598,6 +603,7 @@ def _reshape_metrics(self, metrics):
598603

599604
def _reshape_artifacts(self, artifacts, _artifact_names):
600605
"""Reshape trial component input/output artifacts to a pandas column.
606+
601607
Args:
602608
artifacts: trial component input/output artifacts
603609
Returns:
@@ -631,6 +637,7 @@ def _reshape_parents(self, parents):
631637

632638
def _reshape(self, trial_component):
633639
"""Reshape trial component data to pandas columns.
640+
634641
Args:
635642
trial_component: dict representing a trial component
636643
Returns:
@@ -660,9 +667,8 @@ def _reshape(self, trial_component):
660667
return out
661668

662669
def _fetch_dataframe(self):
663-
"""Return a pandas dataframe with all the trial_components,
664-
along with their parameters and metrics.
665-
"""
670+
"""Return a pandas dataframe that includes all the trial_components."""
671+
666672
df = pd.DataFrame([self._reshape(component) for component in self._get_trial_components()])
667673
return df
668674

0 commit comments

Comments
 (0)