
Commit b97ff89

add unit test for command reset
1 parent: 2f30e85

2 files changed: +31, -1 lines

src/sagemaker/spark/processing.py

Lines changed: 1 addition & 1 deletion
@@ -738,7 +738,7 @@ def run(
         """
         self._current_job_name = self._generate_current_job_name(job_name=job_name)
         self.command = [_SparkProcessorBase._default_command]
-
+
         if not submit_app:
             raise ValueError("submit_app is required")

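For context: the change to processing.py in this commit appears to be a whitespace-only tweak on the line after the reset, while the reset itself (`self.command = [_SparkProcessorBase._default_command]`, visible as context above) is what the new unit test locks in. Resetting `self.command` at the top of `run()` means a second `run()` call rebuilds the submit command from the default launcher instead of appending another round of `--py-files`/`--jars`/`--files` options on top of the first call's command. A minimal sketch of that pattern, using a hypothetical FakeSparkProcessor stand-in rather than the real `_SparkProcessorBase`:

class FakeSparkProcessor:
    """Simplified, hypothetical stand-in for _SparkProcessorBase (illustration only)."""

    _default_command = "smspark-submit"

    def __init__(self):
        self.command = [FakeSparkProcessor._default_command]

    def run(self, submit_py_files=None, submit_jars=None, submit_files=None):
        # Reset the command on every run; without this line the submit options
        # below would keep accumulating across successive run() calls.
        self.command = [FakeSparkProcessor._default_command]
        if submit_py_files:
            self.command.extend(["--py-files", submit_py_files])
        if submit_jars:
            self.command.extend(["--jars", submit_jars])
        if submit_files:
            self.command.extend(["--files", submit_files])


processor = FakeSparkProcessor()
processor.run(submit_py_files="opt", submit_jars="opt", submit_files="opt")
processor.run(submit_py_files="opt", submit_jars="opt", submit_files="opt")

# Same command after the second run as after the first, mirroring the
# expected_command assertion in the new unit test.
assert processor.command == [
    "smspark-submit", "--py-files", "opt", "--jars", "opt", "--files", "opt"
]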

tests/unit/sagemaker/spark/test_processing.py

Lines changed: 30 additions & 0 deletions
@@ -738,6 +738,36 @@ def test_py_spark_processor_run(
     )


+@patch("sagemaker.spark.processing._SparkProcessorBase.run")
+@patch("sagemaker.spark.processing._SparkProcessorBase._stage_submit_deps")
+@patch("sagemaker.spark.processing._SparkProcessorBase._generate_current_job_name")
+def test_py_spark_processor_run_twice(
+    mock_generate_current_job_name,
+    mock_stage_submit_deps,
+    mock_super_run,
+    py_spark_processor
+):
+    mock_stage_submit_deps.return_value = (processing_input, "opt")
+    mock_generate_current_job_name.return_value = "jobName"
+    expected_command = ['smspark-submit', '--py-files', 'opt', '--jars', 'opt', '--files', 'opt']
+    py_spark_processor.run(
+        submit_app="submit_app",
+        submit_py_files="files",
+        submit_jars="test",
+        submit_files="test",
+        inputs=[],
+    )
+
+    py_spark_processor.run(
+        submit_app="submit_app",
+        submit_py_files="files",
+        submit_jars="test",
+        submit_files="test",
+        inputs=[],
+    )
+
+    assert py_spark_processor.command == expected_command
+
 @pytest.mark.parametrize(
     "config, expected",
     [
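The new test patches `_SparkProcessorBase.run` so no real processing job is started, stubs `_stage_submit_deps` to resolve every staged dependency to "opt", and calls `run()` twice with identical arguments; the final assertion only holds if the second call rebuilt `self.command` from scratch rather than appending to the first call's options. To run just this test locally (assuming a dev checkout with the unit-test dependencies installed), pytest's -k selector can target it by name:

pytest tests/unit/sagemaker/spark/test_processing.py -k test_py_spark_processor_run_twice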
