import torch

# linear model: the prediction is w * x, with w as the trainable weight
w = torch.tensor(1.0, requires_grad=True)

def forward(x):
    return w * x

# mean squared error (MSE) loss
def loss(y, y_predicted):
    return ((y_predicted - y) ** 2).mean()
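As a quick sanity check of these helpers, one manual gradient-descent step looks like this (a minimal sketch; the toy data X, Y and the 0.01 learning rate are illustrative assumptions):

X = torch.tensor([1.0, 2.0, 3.0, 4.0])
Y = torch.tensor([2.0, 4.0, 6.0, 8.0])   # targets for y = 2 * x

l = loss(Y, forward(X))
l.backward()                  # fills w.grad with dl/dw
with torch.no_grad():
    w -= 0.01 * w.grad        # update the weight without tracking the update itself
w.grad.zero_()                # clear the gradient before the next step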
requires_grad
x = torch.randn(3, requires_grad=True)
print(x)
# tensor([-0.2124, -0.1003, -0.3773], requires_grad=True)
Once requires_grad is set, every operation on the tensor automatically gets a backward function (grad_fn) attached as the computation graph is built:
y = x + 2
print(y)
# tensor([1.7876, 1.8997, 1.6227], grad_fn=<AddBackward0>)
z = y * y * 2
print(z)
# tensor([6.3911, 7.2174, 5.2663], grad_fn=<MulBackward0>)
z = z.mean()
print(z)
# tensor(6.2916, grad_fn=<MeanBackward0>)
tensor.backward()
Calling z.backward() computes dz/dx and accumulates it into x.grad. Here z = (1/3) * sum(2 * (x_i + 2)^2), so dz/dx_i = (4/3) * (x_i + 2) = (4/3) * y_i, which matches the printed gradient below.
z.backward()
print(x.grad)
# tensor([2.3835, 2.5329, 2.1636])
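Note that backward() can be called with no arguments only because z is a scalar after the mean(). For a non-scalar output, a gradient vector must be passed so PyTorch can form the vector-Jacobian product. A minimal sketch (the values in v are illustrative assumptions):

x = torch.randn(3, requires_grad=True)
z = x * 2                              # z is a vector, so z.backward() alone would raise an error
v = torch.tensor([0.1, 1.0, 0.001])
z.backward(v)                          # vector-Jacobian product; here dz_i/dx_i = 2, so grad = v * 2
print(x.grad)
# tensor([0.2000, 2.0000, 0.0020])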
Stop gradient tracking
There are three ways to stop PyTorch from tracking a tensor in the computational graph: x.requires_grad_(False), x.detach(), and wrapping the computation in with torch.no_grad():.
import torch
x = torch.randn(3, requires_grad=True)
print(x)
# tensor([-0.2124, -0.1003, -0.3773], requires_grad=True)
# using: x.requires_grad_(False) (the trailing underscore marks an in-place operation)
x.requires_grad_(False)
print(x)
# tensor([-0.2124, -0.1003, -0.3773])
# using: x.detach() (returns a new tensor with the same values but no gradient tracking)
y = x.detach()
print(y)
# tensor([-0.2124, -0.1003, -0.3773])
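One caveat worth a sketch: detach() returns a tensor that shares storage with the original, so an in-place write to the detached copy also changes the source tensor (the values here are illustrative assumptions):

a = torch.ones(3, requires_grad=True)
b = a.detach()
b[0] = 99.0        # in-place edit through the detached tensor
print(a)
# tensor([99.,  1.,  1.], requires_grad=True)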
# using: with torch.no_grad():
x.requires_grad_(True)  # re-enable tracking so the with/without comparison below is visible
y = x + 2
print(y)
# tensor([1.7876, 1.8997, 1.6227], grad_fn=<AddBackward0>)
with torch.no_grad():
y = x + 2
print(y)
# tensor([1.7876, 1.8997, 1.6227])
Gradient accumulation
backward() accumulates gradients into .grad instead of overwriting it, so x.grad.zero_() must be called before the next backward pass (with an optimizer, optimizer.zero_grad() does the same job).
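A minimal sketch of the accumulation behavior (the dummy training loop is an illustrative assumption):

weights = torch.ones(4, requires_grad=True)

for epoch in range(3):
    model_output = (weights * 3).sum()
    model_output.backward()
    print(weights.grad)    # tensor([3., 3., 3., 3.]) each epoch thanks to the reset below;
                           # without zero_() it would grow to 6., then 9.
    weights.grad.zero_()   # reset the accumulated gradient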