
Remove unnecessary assert on sub_module.training
ringohoffman committed Mar 1, 2024
1 parent 857584f commit 0b6e185
Showing 1 changed file with 0 additions and 2 deletions.
deepspeed/runtime/zero/parameter_offload.py (0 additions, 2 deletions)
@@ -466,7 +466,6 @@ def post_sub_module_forward_function(self, sub_module):
 
     @torch.no_grad()
     def pre_sub_module_backward_function(self, sub_module):
-        assert sub_module.training, "backward pass is invalid for module in evaluation mode"
         param_coordinator = self.get_param_coordinator(training=True)
         param_coordinator.trace_prologue(sub_module)
         if param_coordinator.is_record_trace():
@@ -475,7 +474,6 @@ def pre_sub_module_backward_function(self, sub_module):
 
     @torch.no_grad()
     def post_sub_module_backward_function(self, sub_module):
-        assert sub_module.training, "backward pass is invalid for module in evaluation mode"
         see_memory_usage(
             f"After sub module backward function {sub_module.__class__.__name__} {sub_module.id} before release",
             force=False)
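For context on why the asserts are unnecessary: PyTorch's autograd does not depend on a module's training flag. Calling .eval() only changes the behavior of layers such as dropout and batch normalization; it does not disable gradient computation, so a backward pass through an eval-mode submodule is valid (for example, when fine-tuning a model whose normalization layers are frozen in eval mode). The removed asserts would have rejected that legitimate case. A minimal standalone sketch, not DeepSpeed code, illustrating the point:

import torch
import torch.nn as nn

# Model with a submodule deliberately switched to eval mode.
model = nn.Sequential(nn.Linear(4, 4), nn.Dropout(p=0.5), nn.Linear(4, 1))
model[1].eval()  # e.g. keep dropout deterministic while fine-tuning

# The forward/backward pass still works: eval mode affects layer
# behavior, not autograd, so gradients flow through the eval-mode
# submodule without error.
out = model(torch.randn(2, 4)).sum()
out.backward()

print(model[0].weight.grad is not None)  # True: gradients were computed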
