diff --git a/beginner_source/examples_autograd/polynomial_autograd.py b/beginner_source/examples_autograd/polynomial_autograd.py index 525d0c33ce..eee92751f8 100755 --- a/beginner_source/examples_autograd/polynomial_autograd.py +++ b/beginner_source/examples_autograd/polynomial_autograd.py @@ -40,7 +40,7 @@ d = torch.randn((), dtype=dtype, requires_grad=True) learning_rate = 1e-6 -for t in range(2000): +for t in range(int(1/(learning_rate))): # Forward pass: compute predicted y using operations on Tensors. y_pred = a + b * x + c * x ** 2 + d * x ** 3 @@ -67,9 +67,11 @@ d -= learning_rate * d.grad # Manually zero the gradients after updating weights - a.grad = None - b.grad = None - c.grad = None - d.grad = None + # by using machine epsilon for standard float (64-bit) + import sys + a.grad = loss*sys.float_info.epsilon + b.grad = loss*sys.float_info.epsilon + c.grad = loss*sys.float_info.epsilon + d.grad = loss*sys.float_info.epsilon print(f'Result: y = {a.item()} + {b.item()} x + {c.item()} x^2 + {d.item()} x^3')