Skip to content

Commit 9dead38

Browse files
authored
feat: remove D205 to enable PEP257 Docstring Conventions (#2045)
* doc: update docstrings to comply with PEP257 * Remove/Edit docstrings to be compatible with PEP257 * doc: modified docstrings to comply with PEP257 standards * doc: modified docstrings to comply with PEP257 standards * doc: modified docstrings to comply with PEP257 standards * doc: modified docstrings to comply with PEP257 standards * fix: fixed conflicts to comply with latest version * fix: edited according to the changes requested * doc: edit docstrings to add blank line * fix: addressed changes Co-authored-by: Ahsan <[email protected]>
1 parent 5b15f30 commit 9dead38

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

79 files changed

+860
-1244
lines changed

.pydocstylerc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
[pydocstyle]
22
inherit = false
3-
ignore = D104,D107,D202,D203,D205,D212,D213,D214,D400,D401,D404,D406,D407,D411,D413,D414,D415,D417
3+
ignore = D104,D107,D202,D203,D212,D213,D214,D400,D401,D404,D406,D407,D411,D413,D414,D415,D417
44
match = (?!record_pb2).*\.py

src/sagemaker/algorithm.py

Lines changed: 13 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,9 @@
2525

2626

2727
class AlgorithmEstimator(EstimatorBase):
28-
"""A generic Estimator to train using any algorithm object (with an
29-
``algorithm_arn``). The Algorithm can be your own, or any Algorithm from AWS
28+
"""A generic Estimator to train using any algorithm object (with an ``algorithm_arn``).
29+
30+
The Algorithm can be your own, or any Algorithm from AWS
3031
Marketplace that you have a valid subscription for. This class will perform
3132
client-side validation on all the inputs.
3233
"""
@@ -212,10 +213,7 @@ def validate_train_spec(self):
212213
)
213214

214215
def set_hyperparameters(self, **kwargs):
215-
"""
216-
Args:
217-
**kwargs:
218-
"""
216+
"""Placeholder docstring"""
219217
for k, v in kwargs.items():
220218
value = self._validate_and_cast_hyperparameter(k, v)
221219
self.hyperparam_dict[k] = value
@@ -330,9 +328,9 @@ def transformer(
330328
role=None,
331329
volume_kms_key=None,
332330
):
333-
"""Return a ``Transformer`` that uses a SageMaker Model based on the
334-
training job. It reuses the SageMaker Session and base job name used by
335-
the Estimator.
331+
"""Return a ``Transformer`` that uses a SageMaker Model based on the training job.
332+
333+
It reuses the SageMaker Session and base job name used by the Estimator.
336334
337335
Args:
338336
instance_count (int): Number of EC2 instances to use.
@@ -413,32 +411,19 @@ def _prepare_for_training(self, job_name=None):
413411
# Validate hyperparameters
414412
# an explicit call to set_hyperparameters() will also validate the hyperparameters
415413
# but it is possible that the user never called it.
416-
"""
417-
Args:
418-
job_name:
419-
"""
420414
self._validate_and_set_default_hyperparameters()
421415

422416
super(AlgorithmEstimator, self)._prepare_for_training(job_name)
423417

424418
def fit(self, inputs=None, wait=True, logs=True, job_name=None):
425-
"""
426-
Args:
427-
inputs:
428-
wait:
429-
logs:
430-
job_name:
431-
"""
419+
"""Placeholder docstring"""
432420
if inputs:
433421
self._validate_input_channels(inputs)
434422

435423
super(AlgorithmEstimator, self).fit(inputs, wait, logs, job_name)
436424

437425
def _validate_input_channels(self, channels):
438-
"""
439-
Args:
440-
channels:
441-
"""
426+
"""Placeholder docstring"""
442427
train_spec = self.algorithm_spec["TrainingSpecification"]
443428
algorithm_name = self.algorithm_spec["AlgorithmName"]
444429
training_channels = {c["Name"]: c for c in train_spec["TrainingChannels"]}
@@ -456,11 +441,7 @@ def _validate_input_channels(self, channels):
456441
raise ValueError("Required input channel: %s Was not provided." % (name))
457442

458443
def _validate_and_cast_hyperparameter(self, name, v):
459-
"""
460-
Args:
461-
name:
462-
v:
463-
"""
444+
"""Placeholder docstring"""
464445
algorithm_name = self.algorithm_spec["AlgorithmName"]
465446

466447
if name not in self.hyperparameter_definitions:
@@ -514,11 +495,7 @@ def _parse_hyperparameters(self):
514495
return definitions
515496

516497
def _hyperparameter_range_and_class(self, parameter_type, hyperparameter):
517-
"""
518-
Args:
519-
parameter_type:
520-
hyperparameter:
521-
"""
498+
"""Placeholder docstring."""
522499
if parameter_type in self._hyperpameters_with_range:
523500
range_name = parameter_type + "ParameterRangeSpecification"
524501

@@ -559,10 +536,7 @@ def _hyperparameter_range_and_class(self, parameter_type, hyperparameter):
559536
return parameter_class, parameter_range
560537

561538
def _algorithm_training_input_modes(self, training_channels):
562-
"""
563-
Args:
564-
training_channels:
565-
"""
539+
"""Placeholder docstring"""
566540
current_input_modes = {"File", "Pipe"}
567541
for channel in training_channels:
568542
supported_input_modes = set(channel["SupportedInputModes"])
@@ -572,8 +546,7 @@ def _algorithm_training_input_modes(self, training_channels):
572546

573547
@classmethod
574548
def _prepare_init_params_from_job_description(cls, job_details, model_channel_name=None):
575-
"""Convert the job description to init params that can be handled by the
576-
class constructor
549+
"""Convert the job description to init params that can be handled by the class constructor.
577550
578551
Args:
579552
job_details (dict): the returned job details from a DescribeTrainingJob

src/sagemaker/amazon/amazon_estimator.py

Lines changed: 14 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -32,8 +32,9 @@
3232

3333

3434
class AmazonAlgorithmEstimatorBase(EstimatorBase):
35-
"""Base class for Amazon first-party Estimator implementations. This class
36-
isn't intended to be instantiated directly.
35+
"""Base class for Amazon first-party Estimator implementations.
36+
37+
This class isn't intended to be instantiated directly.
3738
"""
3839

3940
feature_dim = hp("feature_dim", validation.gt(0), data_type=int)
@@ -110,10 +111,7 @@ def data_location(self):
110111

111112
@data_location.setter
112113
def data_location(self, data_location):
113-
"""
114-
Args:
115-
data_location:
116-
"""
114+
"""Placeholder docstring"""
117115
if not data_location.startswith("s3://"):
118116
raise ValueError(
119117
'Expecting an S3 URL beginning with "s3://". Got "{}"'.format(data_location)
@@ -124,8 +122,7 @@ def data_location(self, data_location):
124122

125123
@classmethod
126124
def _prepare_init_params_from_job_description(cls, job_details, model_channel_name=None):
127-
"""Convert the job description to init params that can be handled by the
128-
class constructor
125+
"""Convert the job description to init params that can be handled by the class constructor.
129126
130127
Args:
131128
job_details: the returned job details from a describe_training_job
@@ -245,8 +242,7 @@ def fit(
245242
self.latest_training_job.wait(logs=logs)
246243

247244
def record_set(self, train, labels=None, channel="train", encrypt=False):
248-
"""Build a :class:`~RecordSet` from a numpy :class:`~ndarray` matrix and
249-
label vector.
245+
"""Build a :class:`~RecordSet` from a numpy :class:`~ndarray` matrix and label vector.
250246
251247
For the 2D ``ndarray`` ``train``, each row is converted to a
252248
:class:`~Record` object. The vector is stored in the "values" entry of
@@ -301,8 +297,7 @@ class RecordSet(object):
301297
def __init__(
302298
self, s3_data, num_records, feature_dim, s3_data_type="ManifestFile", channel="train"
303299
):
304-
"""A collection of Amazon :class:~`Record` objects serialized and stored
305-
in S3.
300+
"""A collection of Amazon :class:~`Record` objects serialized and stored in S3.
306301
307302
Args:
308303
s3_data (str): The S3 location of the training data
@@ -328,9 +323,8 @@ def __repr__(self):
328323
return str((RecordSet, self.__dict__))
329324

330325
def data_channel(self):
331-
"""Return a dictionary to represent the training data in a channel for
332-
use with ``fit()``
333-
"""
326+
"""Returns dictionary to represent the training data in a channel to use with ``fit()``."""
327+
334328
return {self.channel: self.records_s3_input()}
335329

336330
def records_s3_input(self):
@@ -341,9 +335,7 @@ def records_s3_input(self):
341335

342336

343337
class FileSystemRecordSet(object):
344-
"""Amazon SageMaker channel configuration for a file system data source
345-
for Amazon algorithms.
346-
"""
338+
"""Amazon SageMaker channel configuration for file system data source for Amazon algorithms."""
347339

348340
def __init__(
349341
self,
@@ -390,11 +382,7 @@ def data_channel(self):
390382

391383

392384
def _build_shards(num_shards, array):
393-
"""
394-
Args:
395-
num_shards:
396-
array:
397-
"""
385+
"""Placeholder docstring"""
398386
if num_shards < 1:
399387
raise ValueError("num_shards must be >= 1")
400388
shard_size = int(array.shape[0] / num_shards)
@@ -408,8 +396,9 @@ def _build_shards(num_shards, array):
408396
def upload_numpy_to_s3_shards(
409397
num_shards, s3, bucket, key_prefix, array, labels=None, encrypt=False
410398
):
411-
"""Upload the training ``array`` and ``labels`` arrays to ``num_shards`` S3
412-
objects, stored in "s3:// ``bucket`` / ``key_prefix`` /". Optionally
399+
"""Upload the training ``array`` and ``labels`` arrays to ``num_shards``.
400+
401+
S3 objects, stored in "s3:// ``bucket`` / ``key_prefix`` /". Optionally
413402
``encrypt`` the S3 objects using AES-256.
414403
415404
Args:

src/sagemaker/amazon/common.py

Lines changed: 6 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -92,12 +92,7 @@ def deserialize(self, data, content_type):
9292

9393

9494
def _write_feature_tensor(resolved_type, record, vector):
95-
"""
96-
Args:
97-
resolved_type:
98-
record:
99-
vector:
100-
"""
95+
"""Placeholder Docstring"""
10196
if resolved_type == "Int32":
10297
record.features["values"].int32_tensor.values.extend(vector)
10398
elif resolved_type == "Float64":
@@ -107,12 +102,7 @@ def _write_feature_tensor(resolved_type, record, vector):
107102

108103

109104
def _write_label_tensor(resolved_type, record, scalar):
110-
"""
111-
Args:
112-
resolved_type:
113-
record:
114-
scalar:
115-
"""
105+
"""Placeholder Docstring"""
116106
if resolved_type == "Int32":
117107
record.label["values"].int32_tensor.values.extend([scalar])
118108
elif resolved_type == "Float64":
@@ -122,12 +112,7 @@ def _write_label_tensor(resolved_type, record, scalar):
122112

123113

124114
def _write_keys_tensor(resolved_type, record, vector):
125-
"""
126-
Args:
127-
resolved_type:
128-
record:
129-
vector:
130-
"""
115+
"""Placeholder Docstring"""
131116
if resolved_type == "Int32":
132117
record.features["values"].int32_tensor.keys.extend(vector)
133118
elif resolved_type == "Float64":
@@ -137,12 +122,7 @@ def _write_keys_tensor(resolved_type, record, vector):
137122

138123

139124
def _write_shape(resolved_type, record, scalar):
140-
"""
141-
Args:
142-
resolved_type:
143-
record:
144-
scalar:
145-
"""
125+
"""Placeholder Docstring"""
146126
if resolved_type == "Int32":
147127
record.features["values"].int32_tensor.shape.extend([scalar])
148128
elif resolved_type == "Float64":
@@ -285,10 +265,7 @@ def _write_recordio(f, data):
285265

286266

287267
def read_recordio(f):
288-
"""
289-
Args:
290-
f:
291-
"""
268+
"""Placeholder Docstring"""
292269
while True:
293270
try:
294271
(read_kmagic,) = struct.unpack("I", f.read(4))
@@ -303,10 +280,7 @@ def read_recordio(f):
303280

304281

305282
def _resolve_type(dtype):
306-
"""
307-
Args:
308-
dtype:
309-
"""
283+
"""Placeholder Docstring"""
310284
if dtype == np.dtype(int):
311285
return "Int32"
312286
if dtype == np.dtype(float):

src/sagemaker/amazon/factorization_machines.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -112,8 +112,7 @@ def __init__(
112112
factors_init_value=None,
113113
**kwargs
114114
):
115-
"""Factorization Machines is :class:`Estimator` for general-purpose
116-
supervised learning.
115+
"""Factorization Machines is :class:`Estimator` for general-purpose supervised learning.
117116
118117
Amazon SageMaker Factorization Machines is a general-purpose
119118
supervised learning algorithm that you can use for both classification
@@ -247,8 +246,9 @@ def __init__(
247246
self.factors_init_value = factors_init_value
248247

249248
def create_model(self, vpc_config_override=VPC_CONFIG_DEFAULT, **kwargs):
250-
"""Return a :class:`~sagemaker.amazon.FactorizationMachinesModel`
251-
referencing the latest s3 model data produced by this Estimator.
249+
"""Return a :class:`~sagemaker.amazon.FactorizationMachinesModel`.
250+
251+
It references the latest s3 model data produced by this Estimator.
252252
253253
Args:
254254
vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on
@@ -267,8 +267,7 @@ def create_model(self, vpc_config_override=VPC_CONFIG_DEFAULT, **kwargs):
267267

268268

269269
class FactorizationMachinesPredictor(Predictor):
270-
"""Performs binary-classification or regression prediction from input
271-
vectors.
270+
"""Performs binary-classification or regression prediction from input vectors.
272271
273272
The implementation of
274273
:meth:`~sagemaker.predictor.Predictor.predict` in this
@@ -292,6 +291,8 @@ def __init__(
292291
deserializer=RecordDeserializer(),
293292
):
294293
"""
294+
Initialization for FactorizationMachinesPredictor class.
295+
295296
Args:
296297
endpoint_name (str): Name of the Amazon SageMaker endpoint to which
297298
requests are sent.
@@ -314,12 +315,15 @@ def __init__(
314315

315316
class FactorizationMachinesModel(Model):
316317
"""Reference S3 model data created by FactorizationMachines estimator.
318+
317319
Calling :meth:`~sagemaker.model.Model.deploy` creates an Endpoint and
318320
returns :class:`FactorizationMachinesPredictor`.
319321
"""
320322

321323
def __init__(self, model_data, role, sagemaker_session=None, **kwargs):
322324
"""
325+
Initialization for FactorizationMachinesModel class.
326+
323327
Args:
324328
model_data (str): The S3 location of a SageMaker model data
325329
``.tar.gz`` file.

0 commit comments

Comments
 (0)