When using linear regression, is it mandatory to add L2 regularization to the cost function?
I don't add the L2 term (or account for it at all) when computing the cost. Is that wrong?
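To be concrete, by "adding L2" I mean the usual ridge objective (where lambda is my lambda_l2, m is the number of samples, and conventions vary on whether the penalty also gets a 1/m or 1/2 factor):

J(\theta) = \frac{1}{m} \sum_{i=1}^{m} \left( x^{(i)} \theta - y^{(i)} \right)^2 + \lambda \, \lVert \theta \rVert_2^2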
The snippet below should be enough to show what I'm doing:
def gradient(self, X, Y, alpha, minibatch_size):
    predictions = None
    for batch in self.iterate_minibatches(X, Y, minibatch_size, shuffle=True):
        x, y = batch
        predictions = x.dot(self.theta)
        # update one coefficient at a time
        for it in range(self.theta.size):
            temp = x[:, it]
            temp.shape = (y.size, 1)
            errors_x1 = (predictions - y) * temp
            # MSE gradient step, plus the L2 term I only apply here (not in the cost)
            self.theta[it] = self.theta[it] - alpha * (1.0 / y.size) * errors_x1.sum() + self.lambda_l2 * self.theta[it] * self.theta[it].T
        print(self.cost(X, Y, self.theta))
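For comparison, this is my understanding of the textbook minibatch update for ridge regression that the per-coefficient loop above is supposed to match. It is only a sketch: the function name is made up, and theta, alpha and lambda_l2 stand in for my class attributes.

import numpy as np

def ridge_minibatch_step(theta, x, y, alpha, lambda_l2):
    """One minibatch step for L2-regularized linear regression (sketch, not my actual code).

    theta: (n, 1) coefficients, x: (m, n) minibatch, y: (m, 1) targets.
    """
    m = y.shape[0]
    predictions = x.dot(theta)                    # predictions for the batch
    grad = (1.0 / m) * x.T.dot(predictions - y)   # gradient of the MSE term
    grad += (lambda_l2 / m) * theta               # gradient of the L2 penalty
    return theta - alpha * grad                   # simultaneous update of all coefficients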
def cost(self, X, Y, theta, store=True):
    from sklearn.metrics import mean_squared_error
    predictions = X.dot(theta)
    # plain MSE -- no L2 penalty is added here
    cost = mean_squared_error(Y, predictions, sample_weight=None, multioutput='uniform_average')
    if store is True:
        self.cost_history.append(cost)
    return cost
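If the penalty does belong in the cost, I assume it would look roughly like the sketch below (not what I currently use; the method name is hypothetical and lambda_l2 is the same attribute I use in gradient):

def cost_with_l2(self, X, Y, theta, store=True):
    # Hypothetical variant: the same MSE as above plus an L2 penalty on theta.
    import numpy as np
    from sklearn.metrics import mean_squared_error
    predictions = X.dot(theta)
    cost = mean_squared_error(Y, predictions) + self.lambda_l2 * np.sum(np.square(theta))
    if store:
        self.cost_history.append(cost)
    return cost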