@@ -392,6 +392,7 @@ def _normalize_outputs(self, outputs=None):
                     output.destination = s3_uri
                 normalized_outputs.append(output)
             return normalized_outputs
+        return normalized_outputs


 class ScriptProcessor(Processor):
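
Note on the hunk above: assuming, as the reconstructed indentation suggests, that the pre-existing return sat inside the `if outputs is not None:` block, calling the method with no outputs previously fell off the end and implicitly returned None instead of an empty list; the added method-level return closes that gap. A minimal standalone sketch of the fixed control flow (the SageMaker-specific type checks and destination normalization are elided):

def _normalize_outputs(outputs=None):
    # Sketch of the control flow only; real normalization elided.
    normalized_outputs = []
    if outputs is not None:
        for output in outputs:
            normalized_outputs.append(output)
        return normalized_outputs
    # The added method-level return: without it, outputs=None fell
    # through and the function implicitly returned None instead of [].
    return normalized_outputs

assert _normalize_outputs(None) == []      # previously returned None
assert _normalize_outputs(["a"]) == ["a"]  # unchanged behavior
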
@@ -1622,7 +1623,7 @@ def run(  # type: ignore[override]
                 :class:`~sagemaker.workflow.pipeline_context.PipelineSession`
         """
         s3_runproc_sh, inputs, job_name = self._pack_and_upload_code(
-            code, source_dir, dependencies, git_config, job_name, inputs
+            code, source_dir, dependencies, git_config, job_name, inputs, kms_key
         )

         # Submit a processing job.
@@ -1638,7 +1639,9 @@ def run(  # type: ignore[override]
             kms_key=kms_key,
         )

-    def _pack_and_upload_code(self, code, source_dir, dependencies, git_config, job_name, inputs):
+    def _pack_and_upload_code(
+        self, code, source_dir, dependencies, git_config, job_name, inputs, kms_key=None
+    ):
         """Pack local code bundle and upload to Amazon S3."""
         if code.startswith("s3://"):
             return code, inputs, job_name
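
The new parameter is keyword-defaulted to None, so any existing caller of `_pack_and_upload_code` keeps its behavior; `run()` opts in by forwarding its own `kms_key` argument. A hedged end-to-end sketch of what this enables -- the role ARN, framework version, instance type, and key ARN below are placeholders, not values from this commit:

from sagemaker.processing import FrameworkProcessor
from sagemaker.sklearn.estimator import SKLearn

# Placeholder role/instance values -- substitute your own.
processor = FrameworkProcessor(
    estimator_cls=SKLearn,
    framework_version="1.0-1",
    role="arn:aws:iam::111122223333:role/SageMakerRole",
    instance_count=1,
    instance_type="ml.m5.xlarge",
)

# With this fix, the key that run() uses to encrypt the uploaded user
# code also covers the generated runproc.sh entrypoint that
# _pack_and_upload_code() writes to S3.
processor.run(
    code="preprocess.py",
    source_dir="src",
    kms_key="arn:aws:kms:us-east-1:111122223333:key/EXAMPLE-KEY-ID",
)
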
@@ -1676,6 +1679,7 @@ def _pack_and_upload_code(self, code, source_dir, dependencies, git_config, job_name, inputs, kms_key=None):
         s3_runproc_sh = S3Uploader.upload_string_as_file_body(
             self._generate_framework_script(script),
             desired_s3_uri=entrypoint_s3_uri,
+            kms_key=kms_key,
             sagemaker_session=self.sagemaker_session,
         )
         logger.info("runproc.sh uploaded to %s", s3_runproc_sh)
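
For context, `S3Uploader.upload_string_as_file_body` already accepted a `kms_key` argument; the hunk above simply starts forwarding it, so the generated runproc.sh is uploaded with SSE-KMS like the rest of the packed code. A direct-use sketch of that call, with a placeholder bucket, script body, and key ARN:

from sagemaker.s3 import S3Uploader
from sagemaker.session import Session

# Placeholder URI and key ARN -- substitute your own.
s3_uri = S3Uploader.upload_string_as_file_body(
    "#!/bin/bash\npython preprocess.py\n",
    desired_s3_uri="s3://my-bucket/my-job/source/runproc.sh",
    kms_key="arn:aws:kms:us-east-1:111122223333:key/EXAMPLE-KEY-ID",
    sagemaker_session=Session(),
)
print(s3_uri)  # s3://my-bucket/my-job/source/runproc.sh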