@@ -276,12 +276,12 @@ def _run_multi_process(self, data_frame: DataFrame, wait=True, timeout=None):
         for i in range(self.max_processes):
             start_index = min(i * batch_size, data_frame.shape[0])
             end_index = min(i * batch_size + batch_size, data_frame.shape[0])
-            args += [\
-                (self.max_workers,\
-                self.feature_group_name, \
-                self.sagemaker_fs_runtime_client_config, \
-                data_frame[start_index:end_index], \
-                start_index, \
+            args += [(
+                self.max_workers,
+                self.feature_group_name,
+                self.sagemaker_fs_runtime_client_config,
+                data_frame[start_index:end_index],
+                start_index,
                 timeout)]

         def init_worker():
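Side note on the slicing logic kept unchanged above: the min() clamping guarantees the last batch stops at the end of the DataFrame even when the row count does not divide evenly. A minimal, standalone sketch of the partitioning, assuming batch_size is computed as ceil(rows / max_processes) (that computation is outside this hunk; the helper name batch_bounds is illustrative only):

# Illustrative sketch, not part of this change. Assumes
# batch_size = ceil(rows / max_processes), computed outside the hunk above.
import math

import pandas as pd


def batch_bounds(n_rows: int, max_processes: int):
    """Yield (start, end) row ranges with the same min() clamping as above."""
    batch_size = math.ceil(n_rows / max_processes)
    for i in range(max_processes):
        start_index = min(i * batch_size, n_rows)
        end_index = min(i * batch_size + batch_size, n_rows)
        yield start_index, end_index


df = pd.DataFrame({"x": range(10)})
print(list(batch_bounds(df.shape[0], 3)))  # [(0, 4), (4, 8), (8, 10)]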
@@ -297,9 +297,10 @@ def init_worker():
         if wait:
             self.wait(timeout=timeout)

-    def _run_multi_threaded(max_workers: int, feature_group_name: str, \
-        sagemaker_fs_runtime_client_config: Config, data_frame: DataFrame, row_offset=0, timeout=None) \
-        -> List[int]:
+    def _run_multi_threaded(
+        max_workers: int, feature_group_name: str,
+        sagemaker_fs_runtime_client_config: Config,
+        data_frame: DataFrame, row_offset=0, timeout=None) -> List[int]:
         """Start the ingestion process.

         Args:
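For orientation only: each tuple built in the first hunk lines up positionally with this reformatted signature (max_workers, feature_group_name, sagemaker_fs_runtime_client_config, the data_frame slice, row_offset, timeout). The dispatch code itself is not shown in these hunks; below is a generic, hypothetical sketch of star-unpacking such tuples into a worker with the standard library's concurrent.futures, not necessarily the pool this module actually uses.

# Hypothetical sketch only: the real dispatch code is outside these hunks.
# `worker` stands in for _run_multi_threaded; all names here are illustrative.
from concurrent.futures import ProcessPoolExecutor


def worker(max_workers, feature_group_name, config, frame, row_offset, timeout):
    # Stand-in: report which absolute row numbers this slice would cover.
    return list(range(row_offset, row_offset + len(frame)))


def fan_out(args):
    # One tuple per process; star-unpack each tuple into the worker call.
    with ProcessPoolExecutor(max_workers=len(args)) as pool:
        futures = [pool.submit(worker, *arg_tuple) for arg_tuple in args]
        return [f.result() for f in futures]


if __name__ == "__main__":
    args = [(4, "my-fg", None, [0] * 4, 0, None),
            (4, "my-fg", None, [0] * 3, 4, None)]
    print(fan_out(args))  # [[0, 1, 2, 3], [4, 5, 6]]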