import dataclasses
import json
-import os
import sys
import hmac
import hashlib

from tblib import pickling_support

+# Note: do not use os.path.join for s3 uris, fails on windows
+

def _get_python_version():
    return f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
@@ -143,18 +144,15 @@ def serialize_func_to_s3(
    Raises:
        SerializationError: when it fails to serialize the function to bytes.
    """
-
    bytes_to_upload = CloudpickleSerializer.serialize(func)

-    _upload_bytes_to_s3(
-        bytes_to_upload, os.path.join(s3_uri, "payload.pkl"), s3_kms_key, sagemaker_session
-    )
+    _upload_bytes_to_s3(bytes_to_upload, f"{s3_uri}/payload.pkl", s3_kms_key, sagemaker_session)

    sha256_hash = _compute_hash(bytes_to_upload, secret_key=hmac_key)

    _upload_bytes_to_s3(
        _MetaData(sha256_hash).to_json(),
-        os.path.join(s3_uri, "metadata.json"),
+        f"{s3_uri}/metadata.json",
        s3_kms_key,
        sagemaker_session,
    )
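`_compute_hash` itself is outside this hunk; based on the `hmac`/`hashlib` imports above, a plausible sketch of it — an assumption, not the committed implementation:

```python
import hashlib
import hmac


def _compute_hash(buffer: bytes, secret_key: str) -> str:
    # Keyed HMAC-SHA256 rather than a bare digest: a matching hash requires
    # knowing the shared secret, not just the payload bytes.
    return hmac.new(secret_key.encode("utf-8"), msg=buffer, digestmod=hashlib.sha256).hexdigest()
```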
@@ -177,20 +175,16 @@ def deserialize_func_from_s3(sagemaker_session: Session, s3_uri: str, hmac_key:
        DeserializationError: when it fails to deserialize the function from bytes.
    """
    metadata = _MetaData.from_json(
-        _read_bytes_from_s3(os.path.join(s3_uri, "metadata.json"), sagemaker_session)
+        _read_bytes_from_s3(f"{s3_uri}/metadata.json", sagemaker_session)
    )

-    bytes_to_deserialize = _read_bytes_from_s3(
-        os.path.join(s3_uri, "payload.pkl"), sagemaker_session
-    )
+    bytes_to_deserialize = _read_bytes_from_s3(f"{s3_uri}/payload.pkl", sagemaker_session)

    _perform_integrity_check(
        expected_hash_value=metadata.sha256_hash, secret_key=hmac_key, buffer=bytes_to_deserialize
    )

-    return CloudpickleSerializer.deserialize(
-        os.path.join(s3_uri, "payload.pkl"), bytes_to_deserialize
-    )
+    return CloudpickleSerializer.deserialize(f"{s3_uri}/payload.pkl", bytes_to_deserialize)

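`_perform_integrity_check` is also outside the hunk; under the same assumption about `_compute_hash`, it plausibly recomputes the keyed hash over the downloaded bytes and compares in constant time (reusing the module's `hmac` import; `DeserializationError` is named in the docstrings above):

```python
def _perform_integrity_check(expected_hash_value: str, secret_key: str, buffer: bytes) -> None:
    # Recompute the HMAC over the downloaded payload.pkl and compare it to the
    # hash recorded in metadata.json; compare_digest resists timing attacks.
    actual_hash_value = _compute_hash(buffer, secret_key=secret_key)
    if not hmac.compare_digest(expected_hash_value, actual_hash_value):
        raise DeserializationError("Integrity check for the serialized bytes failed.")
```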
def serialize_obj_to_s3(
@@ -211,15 +205,13 @@ def serialize_obj_to_s3(
    """
    bytes_to_upload = CloudpickleSerializer.serialize(obj)

-    _upload_bytes_to_s3(
-        bytes_to_upload, os.path.join(s3_uri, "payload.pkl"), s3_kms_key, sagemaker_session
-    )
+    _upload_bytes_to_s3(bytes_to_upload, f"{s3_uri}/payload.pkl", s3_kms_key, sagemaker_session)

    sha256_hash = _compute_hash(bytes_to_upload, secret_key=hmac_key)

    _upload_bytes_to_s3(
        _MetaData(sha256_hash).to_json(),
-        os.path.join(s3_uri, "metadata.json"),
+        f"{s3_uri}/metadata.json",
        s3_kms_key,
        sagemaker_session,
    )
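`_MetaData` pairs each payload with its hash. Given the `dataclasses`/`json` imports at the top, a sketch of what it could look like — the class body is not shown in this diff, so the field and method details below are inferred from the call sites:

```python
import dataclasses
import json


@dataclasses.dataclass
class _MetaData:
    # Digest of payload.pkl, written next to it as metadata.json.
    sha256_hash: str

    def to_json(self) -> bytes:
        return json.dumps(dataclasses.asdict(self)).encode("utf-8")

    @classmethod
    def from_json(cls, raw: bytes) -> "_MetaData":
        return cls(**json.loads(raw))
```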
@@ -240,20 +232,16 @@ def deserialize_obj_from_s3(sagemaker_session: Session, s3_uri: str, hmac_key: s
    """

    metadata = _MetaData.from_json(
-        _read_bytes_from_s3(os.path.join(s3_uri, "metadata.json"), sagemaker_session)
+        _read_bytes_from_s3(f"{s3_uri}/metadata.json", sagemaker_session)
    )

-    bytes_to_deserialize = _read_bytes_from_s3(
-        os.path.join(s3_uri, "payload.pkl"), sagemaker_session
-    )
+    bytes_to_deserialize = _read_bytes_from_s3(f"{s3_uri}/payload.pkl", sagemaker_session)

    _perform_integrity_check(
        expected_hash_value=metadata.sha256_hash, secret_key=hmac_key, buffer=bytes_to_deserialize
    )

-    return CloudpickleSerializer.deserialize(
-        os.path.join(s3_uri, "payload.pkl"), bytes_to_deserialize
-    )
+    return CloudpickleSerializer.deserialize(f"{s3_uri}/payload.pkl", bytes_to_deserialize)

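The write and read paths above form a round trip. A hypothetical usage sketch — keyword arguments use the parameter names visible in the bodies, but the exact signatures and argument order are not shown in this diff:

```python
from sagemaker import Session

session = Session()
s3_uri = "s3://my-bucket/my-run"  # illustrative bucket/prefix
hmac_key = "shared-secret"        # illustrative key

# Writes <s3_uri>/payload.pkl and <s3_uri>/metadata.json.
serialize_obj_to_s3(
    obj={"learning_rate": 0.1},
    sagemaker_session=session,
    s3_uri=s3_uri,
    s3_kms_key=None,
    hmac_key=hmac_key,
)

# Raises DeserializationError if the payload was tampered with in transit.
restored = deserialize_obj_from_s3(sagemaker_session=session, s3_uri=s3_uri, hmac_key=hmac_key)
assert restored == {"learning_rate": 0.1}
```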
def serialize_exception_to_s3(
@@ -275,15 +263,13 @@ def serialize_exception_to_s3(
    """
    bytes_to_upload = CloudpickleSerializer.serialize(exc)

-    _upload_bytes_to_s3(
-        bytes_to_upload, os.path.join(s3_uri, "payload.pkl"), s3_kms_key, sagemaker_session
-    )
+    _upload_bytes_to_s3(bytes_to_upload, f"{s3_uri}/payload.pkl", s3_kms_key, sagemaker_session)

    sha256_hash = _compute_hash(bytes_to_upload, secret_key=hmac_key)

    _upload_bytes_to_s3(
        _MetaData(sha256_hash).to_json(),
-        os.path.join(s3_uri, "metadata.json"),
+        f"{s3_uri}/metadata.json",
        s3_kms_key,
        sagemaker_session,
    )
@@ -304,20 +290,16 @@ def deserialize_exception_from_s3(sagemaker_session: Session, s3_uri: str, hmac_
    """

    metadata = _MetaData.from_json(
-        _read_bytes_from_s3(os.path.join(s3_uri, "metadata.json"), sagemaker_session)
+        _read_bytes_from_s3(f"{s3_uri}/metadata.json", sagemaker_session)
    )

-    bytes_to_deserialize = _read_bytes_from_s3(
-        os.path.join(s3_uri, "payload.pkl"), sagemaker_session
-    )
+    bytes_to_deserialize = _read_bytes_from_s3(f"{s3_uri}/payload.pkl", sagemaker_session)

    _perform_integrity_check(
        expected_hash_value=metadata.sha256_hash, secret_key=hmac_key, buffer=bytes_to_deserialize
    )

-    return CloudpickleSerializer.deserialize(
-        os.path.join(s3_uri, "payload.pkl"), bytes_to_deserialize
-    )
+    return CloudpickleSerializer.deserialize(f"{s3_uri}/payload.pkl", bytes_to_deserialize)

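The exception variants exist because a pickled exception normally loses its traceback; that is what the `from tblib import pickling_support` import at the top is for. The canonical tblib pattern, as a sketch (the actual `install()` call site is outside this diff):

```python
import pickle
import sys

from tblib import pickling_support

pickling_support.install()  # register reducers so traceback objects pickle

try:
    1 / 0
except ZeroDivisionError:
    # sys.exc_info() is (type, value, traceback); with tblib installed, the
    # traceback round-trips, so a remote failure can re-raise with context.
    exc_type, exc_value, exc_tb = pickle.loads(pickle.dumps(sys.exc_info()))
    assert exc_tb is not None
```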
def _upload_bytes_to_s3(bytes, s3_uri, s3_kms_key, sagemaker_session):
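The section cuts off at `_upload_bytes_to_s3`, whose body is not shown. A plausible stand-in using plain boto3 — the committed helper presumably routes through `sagemaker_session`, so treat everything below as an assumption (the `bytes` parameter name shadowing the builtin mirrors the signature above):

```python
from urllib.parse import urlparse

import boto3


def _upload_bytes_to_s3(bytes, s3_uri, s3_kms_key, sagemaker_session):
    # Split "s3://bucket/prefix/file" into bucket and key components.
    parsed = urlparse(s3_uri)
    bucket, key = parsed.netloc, parsed.path.lstrip("/")
    # Request server-side KMS encryption only when a key is provided.
    kwargs = {"ServerSideEncryption": "aws:kms", "SSEKMSKeyId": s3_kms_key} if s3_kms_key else {}
    boto3.client("s3").put_object(Bucket=bucket, Key=key, Body=bytes, **kwargs)
```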