
I am new to TensorFlow and neural networks.

In the code below everything works fine. However, as soon as I apply the sigmoid by uncommenting the line #X21 = tf.sigmoid(X21), I get strange results: all of my predictions become equal to 1. Why does this happen?

Note that I am predicting house prices in thousands.

import numpy as np
import tensorflow as tf

# Set model weights (biases and weight matrices for both layers)
b1_1 = tf.cast(tf.Variable(np.random.randn(), name="bias"), tf.float64)
b1_2 = tf.cast(tf.Variable(np.random.randn(), name="bias"), tf.float64)
b2_1 = tf.cast(tf.Variable(np.random.randn(), name="bias"), tf.float64)

W1_1 = tf.cast(tf.Variable(np.random.randn(train_X.shape[1], 1), name="weight"), tf.float64)
W1_2 = tf.cast(tf.Variable(np.random.randn(train_X.shape[1], 1), name="weight"), tf.float64)
W2_1 = tf.cast(tf.Variable(np.random.randn(2, 1), name="weight"), tf.float64)

# First (hidden) layer: two linear units
X11 = tf.add(tf.matmul(train_X, W1_1), b1_1)
X12 = tf.add(tf.matmul(train_X, W1_2), b1_2)
# Second (output) layer combining the two hidden units
X21 = tf.add(tf.matmul(tf.squeeze(tf.transpose(tf.stack((X11, X12)))), W2_1), b2_1)
#X21 = tf.sigmoid(X21)

# Placeholder for the target values; it must always be fed
Y = tf.placeholder('float64', shape=[47, 1])

# Mean squared error cost and plain gradient descent optimizer
cost = tf.reduce_sum(tf.pow(X21 - Y, 2)) / (2 * n_samples)
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

# Initialize the variables (i.e. assign their default value)
init = tf.global_variables_initializer()

# Start training
with tf.Session() as sess:

    # Run the initializer
    sess.run(init)

    # Fit all training data
    for epoch in range(training_epochs):
        #for (x, y) in zip(train_X, train_Y):
        sess.run(optimizer, feed_dict={Y: train_Y})
        # Display logs per epoch step
        if (epoch+1) % display_step == 0:
            c = sess.run(cost, feed_dict={Y: train_Y})
            print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(c))

    print("Optimization Finished!")

    training_cost = sess.run(cost, feed_dict={Y: train_Y})
    print("Training cost=", training_cost)
    # Predictions of the network on the training data
    line = sess.run(X21, feed_dict={Y: train_Y})
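For reference, here is a minimal stand-alone check (my own sketch, assuming TensorFlow 1.x with the same tf.Session style as above) of what tf.sigmoid returns for inputs on the scale of prices in thousands:

import tensorflow as tf

# Stand-alone check (not part of the model above): pass a few values of
# different magnitudes through tf.sigmoid and print the result.
x = tf.constant([0.5, 10.0, 250.0, 400.0], dtype=tf.float64)
s = tf.sigmoid(x)

with tf.Session() as sess:
    print(sess.run(s))  # roughly [0.622, 0.99995, 1.0, 1.0] -- large inputs saturate at 1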
