Skip to content

Commit 6f0b20b

Browse files
Author: Payton Staub
Commit message: Fix linter and documentation errors
Commit 6f0b20b (1 parent: 23a5fe2)

File tree

4 files changed: +22 additions, -119 deletions

src/sagemaker/processing.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -449,10 +449,12 @@ def get_run_args(
449449
outputs=None,
450450
arguments=None,
451451
):
452-
"""Returns a RunArgs object. For processors (:class:`~sagemaker.spark.processing.PySparkProcessor`,
453-
:class:`~sagemaker.spark.processing.SparkJar`) that have special
454-
run() arguments, this object contains the normalized arguments for passing to
455-
:class:`~sagemaker.workflow.steps.ProcessingStep`.
452+
"""Returns a RunArgs object.
453+
454+
For processors (:class:`~sagemaker.spark.processing.PySparkProcessor`,
455+
:class:`~sagemaker.spark.processing.SparkJar`) that have special
456+
run() arguments, this object contains the normalized arguments for passing to
457+
:class:`~sagemaker.workflow.steps.ProcessingStep`.
456458
457459
Args:
458460
code (str): This can be an S3 URI or a local path to a file with the framework
@@ -1171,12 +1173,11 @@ def _to_request_dict(self):
11711173

11721174

11731175
class RunArgs(object):
1174-
"""Provides an object containing the standard run arguments needed by
1175-
:class:`~sagemaker.processing.ScriptProcessor`.
1176+
"""Accepts parameters that correspond to ScriptProcessors.
11761177
11771178
An instance of this class is returned from the ``get_run_args()`` method on processors,
1178-
and is used for normalizing the arguments so that they can be passed to
1179-
:class:`~sagemaker.workflow.steps.ProcessingStep`
1179+
and is used for normalizing the arguments so that they can be passed to
1180+
:class:`~sagemaker.workflow.steps.ProcessingStep`
11801181
"""
11811182

11821183
def __init__(

src/sagemaker/spark/processing.py

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -178,7 +178,9 @@ def get_run_args(
178178
outputs=None,
179179
arguments=None,
180180
):
181-
"""Returns a RunArgs object. For processors (:class:`~sagemaker.spark.processing.PySparkProcessor`,
181+
"""Returns a RunArgs object.
182+
183+
For processors (:class:`~sagemaker.spark.processing.PySparkProcessor`,
182184
:class:`~sagemaker.spark.processing.SparkJar`) that have special
183185
run() arguments, this object contains the normalized arguments for passing to
184186
:class:`~sagemaker.workflow.steps.ProcessingStep`.
@@ -729,8 +731,11 @@ def get_run_args(
729731
configuration=None,
730732
spark_event_logs_s3_uri=None,
731733
):
732-
"""Returns a RunArgs object. This object contains the normalized inputs, outputs
733-
and arguments needed when using a ``PySparkProcessor`` in a :class:`~sagemaker.workflow.steps.ProcessingStep`.
734+
"""Returns a RunArgs object.
735+
736+
This object contains the normalized inputs, outputs
737+
and arguments needed when using a ``PySparkProcessor``
738+
in a :class:`~sagemaker.workflow.steps.ProcessingStep`.
734739
735740
Args:
736741
submit_app (str): Path (local or S3) to Python file to submit to Spark
@@ -974,8 +979,11 @@ def get_run_args(
974979
configuration=None,
975980
spark_event_logs_s3_uri=None,
976981
):
977-
"""Returns a RunArgs object. This object contains the normalized inputs, outputs
978-
and arguments needed when using a ``SparkJarProcessor`` in a :class:`~sagemaker.workflow.steps.ProcessingStep`.
982+
"""Returns a RunArgs object.
983+
984+
This object contains the normalized inputs, outputs
985+
and arguments needed when using a ``SparkJarProcessor``
986+
in a :class:`~sagemaker.workflow.steps.ProcessingStep`.
979987
980988
Args:
981989
submit_app (str): Path (local or S3) to Python file to submit to Spark

tests/unit/sagemaker/spark/test_processing.py

Lines changed: 0 additions & 105 deletions
Original file line numberDiff line numberDiff line change
@@ -246,7 +246,6 @@ def test_spark_processor_base_run(mock_super_run, spark_processor_base):
246246
)
247247
@patch("sagemaker.spark.processing.ProcessingOutput")
248248
@patch("sagemaker.spark.processing._SparkProcessorBase._stage_configuration")
249-
@patch("sagemaker.processing.ScriptProcessor.run")
250249
def test_spark_processor_base_extend_processing_args(
251250
mock_super_run,
252251
mock_stage_configuration,
@@ -270,13 +269,6 @@ def test_spark_processor_base_extend_processing_args(
270269
assert extended_outputs == expected["outputs"]
271270

272271

273-
@patch("sagemaker.processing.ScriptProcessor.run")
274-
def test_spark_processor_base_run(mock_super_run, spark_processor_base):
275-
spark_processor_base.run(submit_app="app")
276-
277-
mock_super_run.assert_called_with("app", None, None, None, True, True, None, None, None)
278-
279-
280272
serialized_configuration = BytesIO("test".encode("utf-8"))
281273

282274

@@ -863,103 +855,6 @@ def test_py_spark_processor_get_run_args(
863855
)
864856

865857

866-
@pytest.mark.parametrize(
867-
"config, expected",
868-
[
869-
(
870-
{
871-
"submit_app": None,
872-
"files": ["test"],
873-
"inputs": [],
874-
"opt": None,
875-
"arguments": ["arg1"],
876-
},
877-
ValueError,
878-
),
879-
(
880-
{
881-
"submit_app": "test.py",
882-
"files": None,
883-
"inputs": [processing_input],
884-
"opt": None,
885-
"arguments": ["arg1"],
886-
},
887-
[processing_input],
888-
),
889-
(
890-
{
891-
"submit_app": "test.py",
892-
"files": ["test"],
893-
"inputs": [processing_input],
894-
"opt": None,
895-
"arguments": ["arg1"],
896-
},
897-
[processing_input, processing_input, processing_input, processing_input],
898-
),
899-
(
900-
{
901-
"submit_app": "test.py",
902-
"files": ["test"],
903-
"inputs": None,
904-
"opt": None,
905-
"arguments": ["arg1"],
906-
},
907-
[processing_input, processing_input, processing_input],
908-
),
909-
(
910-
{
911-
"submit_app": "test.py",
912-
"files": ["test"],
913-
"inputs": None,
914-
"opt": "opt",
915-
"arguments": ["arg1"],
916-
},
917-
[processing_input, processing_input, processing_input],
918-
),
919-
],
920-
)
921-
@patch("sagemaker.spark.processing._SparkProcessorBase.get_run_args")
922-
@patch("sagemaker.spark.processing._SparkProcessorBase._stage_submit_deps")
923-
@patch("sagemaker.spark.processing._SparkProcessorBase._generate_current_job_name")
924-
def test_py_spark_processor_get_run_args(
925-
mock_generate_current_job_name,
926-
mock_stage_submit_deps,
927-
mock_super_get_run_args,
928-
py_spark_processor,
929-
config,
930-
expected,
931-
):
932-
mock_stage_submit_deps.return_value = (processing_input, "opt")
933-
mock_generate_current_job_name.return_value = "jobName"
934-
935-
if expected is ValueError:
936-
with pytest.raises(expected):
937-
py_spark_processor.get_run_args(
938-
submit_app=config["submit_app"],
939-
submit_py_files=config["files"],
940-
submit_jars=config["files"],
941-
submit_files=config["files"],
942-
inputs=config["inputs"],
943-
arguments=config["arguments"],
944-
)
945-
else:
946-
py_spark_processor.get_run_args(
947-
submit_app=config["submit_app"],
948-
submit_py_files=config["files"],
949-
submit_jars=config["files"],
950-
submit_files=config["files"],
951-
inputs=config["inputs"],
952-
arguments=config["arguments"],
953-
)
954-
955-
mock_super_get_run_args.assert_called_with(
956-
code=config["submit_app"],
957-
inputs=expected,
958-
outputs=None,
959-
arguments=config["arguments"],
960-
)
961-
962-
963858
@patch("sagemaker.spark.processing._SparkProcessorBase.run")
964859
@patch("sagemaker.spark.processing._SparkProcessorBase._stage_submit_deps")
965860
@patch("sagemaker.spark.processing._SparkProcessorBase._generate_current_job_name")

tests/unit/test_processing.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@
2727
Processor,
2828
ScriptProcessor,
2929
ProcessingJob,
30-
RunArgs,
3130
)
3231
from sagemaker.sklearn.processing import SKLearnProcessor
3332
from sagemaker.network import NetworkConfig

Comments (0)