Restore compatibility with Python 3.8
Signed-off-by: Przemyslaw Tredak <[email protected]>
ptrendx committed Sep 17, 2024
1 parent 528d44b commit e1252b1
Showing 1 changed file with 10 additions and 14 deletions.
transformer_engine/pytorch/distributed.py (24 changes: 10 additions & 14 deletions)
@@ -354,13 +354,11 @@ def backward(
 
         # Compute the forward pass.
         detached_inputs = detach_variable(inputs)
-        with (
-            torch.enable_grad(),
-            ctx.recompute_ctx,
-            ctx.torch_gpu_amp_ctx,
-            ctx.torch_cpu_amp_ctx,
-            activation_recompute_forward(activation_recompute=True, recompute_phase=True),
-        ):
+        with torch.enable_grad(), \
+            ctx.recompute_ctx, \
+            ctx.torch_gpu_amp_ctx, \
+            ctx.torch_cpu_amp_ctx, \
+            activation_recompute_forward(activation_recompute=True, recompute_phase=True):
             outputs = ctx.run_function(*detached_inputs, **ctx.kwargs)
 
         # Set the states back to what it was at the start of this function.
@@ -680,13 +678,11 @@ def checkpoint(
     torch_gpu_amp_forward_ctx, torch_cpu_amp_forward_ctx = _get_active_autocast_contexts()
 
     def recompute_fn(*args, **kwargs):
-        with (
-            torch.autograd.enable_grad(),
-            te_recompute_ctx,
-            user_recompute_ctx,
-            torch_gpu_amp_forward_ctx,
-            torch_cpu_amp_forward_ctx,
-        ):
+        with torch.autograd.enable_grad(), \
+            te_recompute_ctx, \
+            user_recompute_ctx, \
+            torch_gpu_amp_forward_ctx, \
+            torch_cpu_amp_forward_ctx:
             function(*args, **kwargs)
 
     # Initialize a new checkpoint frame for each new forward pass.
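For context on the change: parenthesized groups of context managers, as in the removed with ( ... ): blocks above, are only officially part of Python's grammar as of 3.10, and Python 3.8's parser rejects them with a SyntaxError at compile time, so the module cannot even be imported there. The sketch below is not part of the commit; the ctx helper is made up for illustration. It shows the failing form and two portable alternatives, including the backslash-continuation style this commit adopts.

    import contextlib

    @contextlib.contextmanager
    def ctx(name):  # hypothetical context manager, for illustration only
        print(f"enter {name}")
        try:
            yield
        finally:
            print(f"exit {name}")

    # Python 3.10+ grammar; a SyntaxError on Python 3.8:
    # with (
    #     ctx("a"),
    #     ctx("b"),
    # ):
    #     ...

    # Portable to Python 3.8: backslash line continuations, as in this commit.
    with ctx("a"), \
         ctx("b"):
        pass

    # Also portable: contextlib.ExitStack (available since Python 3.3) avoids
    # long continuation lines when stacking many context managers.
    with contextlib.ExitStack() as stack:
        stack.enter_context(ctx("a"))
        stack.enter_context(ctx("b"))

ExitStack would also have satisfied Python 3.8 here; presumably the commit uses backslash continuations because they keep the diff minimal and preserve the original control flow exactly.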
