I have a custom loss function that I'm trying to use with my model, but it doesn't work when I call loss.backward() in PyTorch.
Here is my loss function:
class Neg_Pearson(nn.Module):  # Pearson range [-1, 1] so if < 0, abs|loss|; if > 0, 1 - loss
    def __init__(self):
        super(Neg_Pearson, self).__init__()
        return
    def forward(self, preds, labels):  # tensor [Batch, Temporal]
        loss = 0
        for i in range(preds.shape[0]):
            sum_x = torch.sum(preds[i])                  # x
            sum_y = torch.sum(labels[i])                 # y
            sum_xy = torch.sum(preds[i]*labels[i])       # xy
            sum_x2 = torch.sum(torch.pow(preds[i], 2))   # x^2
            sum_y2 = torch.sum(torch.pow(labels[i], 2))  # y^2
            N = preds.shape[1]
            pearson = (N*sum_xy - sum_x*sum_y)/(torch.sqrt((N*sum_x2 - torch.pow(sum_x, 2))*(N*sum_y2 - torch.pow(sum_y, 2))))
            loss += 1 - pearson
        loss = loss.tolist()
        loss = loss/preds.shape[0]
        #print(loss)
        return loss
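For reference, a quick standalone check (the shapes below are made up, just something in the [Batch, Temporal] layout that forward() expects) shows what type the loss actually comes back as:

import torch
import torch.nn as nn

crit = Neg_Pearson()
preds = torch.randn(4, 128, requires_grad=True)   # hypothetical batch of predictions
labels = torch.randn(4, 128)                      # hypothetical batch of labels
out = crit(preds, labels)
print(type(out))  # <class 'float'> -- tolist() turned the 0-dim tensor into a plain Python number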
When I try to use it with my model like this:
yp = (yp - torch.mean(yp)) / torch.std(yp)  # normalize
yt = (yt - torch.mean(yt)) / torch.std(yt)  # normalize
loss = neg_pears_loss(yp, yt)
print(loss)
optimizer.zero_grad()
loss.backward()
optimizer.step()
I get the following error: AttributeError: 'float' object has no attribute 'backward'
Any suggestions on how to fix this?
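For what it's worth, the error says that loss is already a plain Python float by the time backward() is called, which points at the tolist() conversion inside forward(). A minimal sketch of the same forward() that simply keeps loss as a tensor (this is an assumption about the intended behaviour; everything else is left as in the original) would be:

def forward(self, preds, labels):  # tensor [Batch, Temporal]
    loss = 0
    for i in range(preds.shape[0]):
        sum_x = torch.sum(preds[i])
        sum_y = torch.sum(labels[i])
        sum_xy = torch.sum(preds[i]*labels[i])
        sum_x2 = torch.sum(torch.pow(preds[i], 2))
        sum_y2 = torch.sum(torch.pow(labels[i], 2))
        N = preds.shape[1]
        pearson = (N*sum_xy - sum_x*sum_y)/(torch.sqrt((N*sum_x2 - torch.pow(sum_x, 2))*(N*sum_y2 - torch.pow(sum_y, 2))))
        loss += 1 - pearson
    # keep the result as a 0-dim tensor so autograd can trace it and loss.backward() works
    return loss/preds.shape[0]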