torch/distributed/checkpoint: 1 file changed, 0 additions, 7 deletions

@@ -279,16 +279,9 @@ def _verify_state_dict(
     optim_state_dict: OptimizerStateType,
     info: _StateDictInfo,
 ) -> None:
-    # FSDP root must exist otherwise FSDP state_dict will be incorrect.
-    has_fsdp_root = False
     for module in info.fsdp_modules:
         fsdp_state = _get_module_fsdp_state_if_fully_sharded_module(module)
         assert fsdp_state is not None, "Expected a fsdp_state with a fsdp module."
-        if fsdp_state._is_root:
-            has_fsdp_root = True
-            break
-    if info.fsdp_modules and not has_fsdp_root:
-        raise RuntimeError("The model has FSDP modules but no FSDP root module exists.")

     # Verify if the model_state_dict and optim_state_dict are valid. This API
     # should give the users an explicit error message to debug or report.
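
For context, here is the removed validation isolated as a standalone sketch (not part of the PR). The helper name _check_fsdp_root, its signature, and the private import path are assumptions for illustration; the loop body and the error message are copied from the deleted lines. After this change, _verify_state_dict still asserts that every module in info.fsdp_modules carries an FSDP state, but it no longer requires any of them to be the FSDP root.

# Sketch only: the check deleted by this hunk, factored into a hypothetical
# standalone helper. The private import path below is an assumption and may
# differ across PyTorch releases.
from typing import Sequence

import torch.nn as nn
from torch.distributed.fsdp._common_utils import (
    _get_module_fsdp_state_if_fully_sharded_module,
)


def _check_fsdp_root(fsdp_modules: Sequence[nn.Module]) -> None:
    # FSDP root must exist otherwise FSDP state_dict will be incorrect.
    has_fsdp_root = False
    for module in fsdp_modules:
        fsdp_state = _get_module_fsdp_state_if_fully_sharded_module(module)
        assert fsdp_state is not None, "Expected a fsdp_state with a fsdp module."
        if fsdp_state._is_root:
            has_fsdp_root = True
            break
    if fsdp_modules and not has_fsdp_root:
        raise RuntimeError(
            "The model has FSDP modules but no FSDP root module exists."
        )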