0

我尝试了以下代码:

from d2l import tensorflow as d2l
import tensorflow as tf

@tf.function
def corr2d(X, k, Y):  #@save
    """Compute 2D cross-correlation of X with kernel k, writing into Y.

    X: input tensor; k: kernel Variable; Y: pre-allocated output Variable,
    updated in place via .assign. Reads the kernel height/width from the
    globals h and w defined by the calling script.
    """
    with tf.GradientTape() as tape:
        for i in range(Y.shape[0]):
            for j in range(Y.shape[1]):
                Y[i, j].assign(tf.reduce_sum(tf.multiply(X[i: i + h, j: j + w], k)))
    # NOTE(review): this prints None — Y is filled through in-place .assign
    # updates, so there is no differentiable op on the tape connecting Y to k
    # (that is exactly the behavior the question asks about).
    print('Gradients = ', tape.gradient(Y, k)) # show the gradient
    print('Watched Variables = ', tape.watched_variables()) # show the watched variables

print(tf.__version__)
# Input is a constant, so the tape will not watch it automatically.
Xin = tf.constant([[0.0, 1.0, 2.0], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]])
kernel = tf.Variable([[0.0, 1.0], [2.0, 3.0]])
# corr2d reads the kernel height/width from these globals.
h, w = kernel.shape
# Output of a 'valid' cross-correlation: (n_h - k_h + 1, n_w - k_w + 1).
Y_hat = tf.Variable(tf.zeros((Xin.shape[0] - h + 1, Xin.shape[1] - w + 1))) # prepare the output tensor
# Fix: the original called corr2d(X, ...) but the input variable is named
# Xin — that would raise NameError; the printed output shows Xin was used.
corr2d(Xin, kernel, Y_hat)
print(Y_hat)

我得到以下结果:

2.4.1
Gradients =  None
Watched Variables =  (<tf.Variable 'Variable:0' shape=(2, 2) dtype=float32>, <tf.Variable 'Variable:0' shape=(2, 2) dtype=float32>)
<tf.Variable 'Variable:0' shape=(2, 2) dtype=float32, numpy=
array([[19., 25.],
       [37., 43.]], dtype=float32)>

谁能解释为什么返回的梯度是None即使源变量kernel包含在监视变量列表中?

4

1 回答 1

0

我不确定我是否完全理解你想要做什么。你把一个变量(Y)当作了求梯度的目标来传递。

从“成本函数与变量”的角度来思考总是更容易。假设你的成本函数是 y = x ** 2,在这种情况下,就可以计算 y 相对于 x 的梯度。

本质上,你的函数里并没有一个可以对 k 求梯度的标量(成本函数)。

我做了一个小改动——请注意新引入的 cost 变量。

import tensorflow as tf

def corr2d(X, k, Y):  #@save
    """Compute 2D cross-correlation of X with kernel k, writing into Y.

    Also accumulates a scalar `cost` (the sum of all output elements) inside
    the tape, so there is a differentiable path from k to a target:
    tape.gradient(cost, k) is well defined, whereas gradients through Y's
    in-place .assign updates are not recorded on the tape.
    """
    # Take the kernel size from k itself instead of relying on globals h, w.
    kh, kw = k.shape
    with tf.GradientTape() as tape:
        cost = 0
        for i in range(Y.shape[0]):
            for j in range(Y.shape[1]):
                # Compute each window response once and reuse it for both the
                # in-place output update and the differentiable cost (the
                # original evaluated the same reduce_sum twice per element).
                val = tf.reduce_sum(tf.multiply(X[i: i + kh, j: j + kw], k))
                Y[i, j].assign(val)
                cost = cost + val
    print('\nGradients = ', tape.gradient(cost, k)) # show the gradient
    print('Watched Variables = ', tape.watched_variables()) # show the watched variables

# Demo: run the corrected cross-correlation and inspect the gradient.
print(tf.__version__)

# 3x3 input and 2x2 kernel from the d2l example.
Xin = tf.constant([[0.0, 1.0, 2.0],
                   [3.0, 4.0, 5.0],
                   [6.0, 7.0, 8.0]])
kernel = tf.Variable([[0.0, 1.0], [2.0, 3.0]])
h, w = kernel.shape  # globals read by corr2d for the window size

# Output variable sized for a 'valid' correlation.
out_rows = Xin.shape[0] - h + 1
out_cols = Xin.shape[1] - w + 1
Y_hat = tf.Variable(tf.zeros((out_rows, out_cols)))

corr2d(Xin, kernel, Y_hat)
print(Y_hat)

现在,你会得到

Gradients =  tf.Tensor(
[[ 8. 12.]
 [20. 24.]], shape=(2, 2), dtype=float32)
Watched Variables =  (<tf.Variable 'Variable:0' shape=(2, 2) dtype=float32, numpy=
array([[0., 1.],
       [2., 3.]], dtype=float32)>, <tf.Variable 'Variable:0' shape=(2, 2) dtype=float32, numpy=
array([[19., 25.],
       [37., 43.]], dtype=float32)>)
<tf.Variable 'Variable:0' shape=(2, 2) dtype=float32, numpy=
array([[19., 25.],
       [37., 43.]], dtype=float32)>
于 2021-02-17T06:12:21.930 回答