I implemented a generative adversarial network in TensorFlow. At test time, if I generate images with the same batch_size I used for training (64), the generated images look very good; if I generate one image at a time, the results are terrible.
I can think of two possible causes:
- batch normalization? (illustrated with a sketch right after this list)
- incorrect use of tf.shape to get the dynamic batch size
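To show what I mean by the first point, here is a minimal sketch of how I understand the is_training flag of tf.contrib.layers.batch_norm. The is_training placeholder below is purely illustrative; I never pass that argument anywhere in my own code further down:

import tensorflow as tf
from tensorflow.contrib.layers.python.layers import batch_norm

x = tf.placeholder(tf.float32, [None, 1024])
# hypothetical flag, only for illustration; my code leaves is_training at its default
training_flag = tf.placeholder(tf.bool, name='is_training')

# With is_training=True (the default), batch_norm normalizes with the mean/variance
# of the current batch, so a batch containing a single image is normalized against
# itself. With is_training=False it uses the moving averages accumulated during
# training (controlled by decay).
h = batch_norm(x, epsilon=1e-5, decay=0.9, scale=True,
               is_training=training_flag, updates_collections=None,
               scope='bn_example')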
Here is my code:
import tensorflow as tf
from tensorflow.contrib.layers.python.layers import batch_norm
def conc(x, y):
    """Concatenate conditioning vector on feature map axis."""
    x_shapes = x.get_shape()
    y_shapes = y.get_shape()
    # batch size is read dynamically so it can differ between training and test
    x0 = tf.shape(x)[0]
    x1 = x_shapes[1].value
    x2 = x_shapes[2].value
    y3 = y_shapes[3].value
    # tile y over the spatial dimensions of x and concatenate on the channel axis
    return tf.concat([x, y * tf.ones(shape=(x0, x1, x2, y3))], 3)
def batch_normal(input, scope="scope", reuse=False):
    # thin wrapper around tf.contrib's batch_norm; is_training is left at its default
    return batch_norm(input, epsilon=1e-5, decay=0.9, scale=True, scope=scope,
                      reuse=reuse, updates_collections=None)
def generator(z_var, y):
    y_dim = y.get_shape()[1].value
    # condition the latent vector on the label
    z_var = tf.concat([z_var, y], 1)
    d1 = tf.layers.dense(z_var, 1024,
                         kernel_initializer=tf.random_normal_initializer(stddev=0.02),
                         name='gen_fc1')
    d1 = tf.nn.relu(batch_normal(d1, scope='gen_bn1'))
    # second fully connected layer, again conditioned on the label
    d1 = tf.concat([d1, y], 1)
    d2 = tf.layers.dense(d1, 7 * 7 * 128,
                         kernel_initializer=tf.random_normal_initializer(stddev=0.02),
                         name='gen_fc2')
    d2 = tf.nn.relu(batch_normal(d2, scope='gen_bn2'))
    # reshape to a 7x7 feature map and append the label as extra channels
    d2 = tf.reshape(d2, [-1, 7, 7, 128])
    y = tf.reshape(y, shape=[-1, 1, 1, y_dim])
    d2 = conc(d2, y)
    # upsample 7x7 -> 14x14
    deconv1 = tf.layers.conv2d_transpose(d2, 64, (4, 4), strides=(2, 2), padding='same',
                                         kernel_initializer=tf.random_normal_initializer(stddev=0.02),
                                         name='gen_deconv1')
    d3 = tf.nn.relu(batch_normal(deconv1, scope='gen_bn3'))
    d3 = conc(d3, y)
    # upsample 14x14 -> 28x28 and squash to [0, 1]
    deconv2 = tf.layers.conv2d_transpose(d3, 1, (4, 4), strides=(2, 2), padding='same',
                                         kernel_initializer=tf.random_normal_initializer(stddev=0.02),
                                         name='gen_deconv2')
    return tf.nn.sigmoid(deconv2)
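And this is roughly how I sample from the generator at test time, which is where the second point (dynamic batch size via tf.shape) gets exercised. The dimensions z_dim=100 and y_dim=10 below are example values, not necessarily my exact ones, and in my real script I restore a trained checkpoint instead of initializing fresh variables:

import numpy as np

z = tf.placeholder(tf.float32, [None, 100], name='z')   # dynamic batch size
y = tf.placeholder(tf.float32, [None, 10], name='y')
fake_images = generator(z, y)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # stand-in for restoring my checkpoint
    # a batch of 64 samples gives good images
    batch = sess.run(fake_images,
                     feed_dict={z: np.random.normal(size=(64, 100)),
                                y: np.eye(10)[np.random.randint(0, 10, 64)]})
    # a single sample gives much worse images
    single = sess.run(fake_images,
                      feed_dict={z: np.random.normal(size=(1, 100)),
                                 y: np.eye(10)[[3]]})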