torch.autograd.backward
Automatically computes gradients.
tensors: the tensors to differentiate (where the backward pass starts)
retain_graph: keep the computation graph after the backward pass
create_graph: build a graph of the derivative computation, used for higher-order derivatives
grad_tensors: weights for multiple gradients
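A minimal sketch of my own (not from the original notes) showing how grad_tensors weights the gradients of several outputs, and how create_graph enables a second-order derivative via torch.autograd.grad; the names y0, y1, z, f below are illustrative:

import torch

# grad_tensors: weights applied when backpropagating from several outputs
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(w, x)                       # a = w + x
b = torch.add(w, 1)                       # b = w + 1
y0 = torch.mul(a, b)                      # dy0/dw = (w + 1) + (w + x) = 5
y1 = torch.add(a, b)                      # dy1/dw = 1 + 1 = 2

torch.autograd.backward(
    [y0, y1],
    grad_tensors=[torch.tensor([1.]), torch.tensor([2.])])
print(w.grad)                             # tensor([9.]) = 1*5 + 2*2

# create_graph: build a graph of the gradient itself for higher-order derivatives
z = torch.tensor([3.], requires_grad=True)
f = torch.pow(z, 2)                                          # f = z**2
grad_1 = torch.autograd.grad(f, z, create_graph=True)[0]     # df/dz = 2z = 6
grad_2 = torch.autograd.grad(grad_1, z)[0]                   # d2f/dz2 = 2
print(grad_1, grad_2)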
import torch

flag = True
# flag = False
if flag:
    # requires_grad marks the tensor as needing gradients
    w = torch.tensor([1.], requires_grad=True)
    x = torch.tensor([2.], requires_grad=True)

    a = torch.add(w, x)     # a = w + x
    b = torch.add(w, 1)     # b = w + 1
    y = torch.mul(a, b)     # y = (w + x) * (w + 1)

    y.backward()            # calls torch.autograd.backward under the hood
    print(w.grad)           # dy/dw = (w + 1) + (w + x) = tensor([5.])
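Worth noting here (a sketch I am adding, reusing the same w, x, a, b, y as above): the graph is freed after backward(), so calling backward() a second time on the same y requires retain_graph=True on the first call.

import torch

w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(w, x)
b = torch.add(w, 1)
y = torch.mul(a, b)

y.backward(retain_graph=True)   # keep the graph so backward can run again
print(w.grad)                   # tensor([5.])
y.backward()                    # without retain_graph above this would raise a RuntimeError
print(w.grad)                   # tensor([10.]) -- gradients accumulate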
autograd
Gradients are not cleared automatically; they accumulate across backward() calls
Nodes that depend on leaf nodes have requires_grad=True by default
Leaf nodes must not be modified in-place
# flag = True
flag = False
if flag:
    w = torch.tensor([1.], requires_grad=True)
    x = torch.tensor([2.], requires_grad=True)

    for i in range(2):
        a = torch.add(w, x)
        b = torch.add(w, 1)
        y = torch.mul(a, b)

        y.backward()
        print(w.grad)        # tensor([5.]) on every iteration

        w.grad.zero_()       # clear manually; otherwise gradients accumulate (5, 10, ...)
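To illustrate the second note (nodes that depend on leaf nodes get requires_grad=True automatically), a small sketch of my own using the same graph:

import torch

w = torch.tensor([1.], requires_grad=True)    # leaf node
x = torch.tensor([2.], requires_grad=True)    # leaf node
a = torch.add(w, x)                           # built from leaves -> not a leaf
b = torch.add(w, 1)
y = torch.mul(a, b)

print(w.is_leaf, x.is_leaf, a.is_leaf, b.is_leaf, y.is_leaf)   # True True False False False
print(a.requires_grad, b.requires_grad, y.requires_grad)       # True True True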
flag = True
if flag:
    a = torch.ones((1,))
    print(id(a), a)

    a = a + torch.ones((1,))    # out-of-place add: creates a new tensor object
    print(id(a), a)             # id changes; an in-place op (a += ...) would keep the same id
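To connect this to the third note (leaf nodes must not be modified in-place), a hedged sketch: an in-place op on a leaf that requires grad raises a RuntimeError, which is why the out-of-place a = a + ... above is the safe form.

import torch

w = torch.tensor([1.], requires_grad=True)    # leaf tensor
try:
    w.add_(1)                                 # in-place op on a leaf that requires grad
except RuntimeError as e:
    print(e)   # "a leaf Variable that requires grad is being used in an in-place operation"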