
I am transitioning from PyTorch to TensorFlow 1.12 and would like to know whether it is possible to define a tf.keras.Sequential model inside a tf.keras.Model subclass and run it in eager mode.

I have put together the minimal non-working example below and would appreciate it if someone could tell me where I am going wrong. I have also used the tf.contrib.eager.Network classes (with more success), but since they are scheduled for deprecation I am trying to avoid them.

import numpy as np
import tensorflow as tf
import tensorflow.contrib.eager as tfe
from keras.models import Sequential
from keras.layers import Dense, Activation
from tensorflow.train import AdamOptimizer

tf.enable_eager_execution()

class MLP(tf.keras.Model):
    def __init__(self, in_dim, out_dim, hidden_dim, num_layers, activation):
        super(MLP, self).__init__()
        model = Sequential()
        in_features = in_dim
        for layer in range(num_layers):
        model.add(Dense(hidden_dim))
            model.add(Activation(activation))
            in_features = hidden_dim
        model.add(Dense(out_dim, input_shape=(hidden_dim,)))
        self.model = model

    def call(self, inputs):
        return self.model(inputs)

model = MLP(10, 1, 20, 4, 'relu')

optim = AdamOptimizer(learning_rate=1e-4)

for v in model.variables:
    print(v)

z = tf.convert_to_tensor(np.random.randn(100, 10), dtype=tf.float32)
with tfe.GradientTape() as tape:
    tape.watch(z)
    u = model(z)
    loss = tf.reduce_mean(tf.abs(u))
grad = tape.gradient(loss, model.trainable_variables)
optim.apply_gradients(zip(grad, model.trainable_variables))
print(loss.numpy())

1 Answer


Use

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation

instead of:

from keras.models import Sequential
from keras.layers import Dense, Activation
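The likely cause of the failure is that the standalone keras package builds its layers against its own backend and does not run eagerly, so layers imported from keras are not tracked as variables of a tf.keras.Model. For completeness, here is a minimal sketch of the full script with that one fix applied (untested, assuming TensorFlow 1.12 with eager execution enabled); everything else follows the question's code:

import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation
from tensorflow.train import AdamOptimizer

tf.enable_eager_execution()

class MLP(tf.keras.Model):
    def __init__(self, in_dim, out_dim, hidden_dim, num_layers, activation):
        super(MLP, self).__init__()
        # A Sequential built from tf.keras layers is tracked by the enclosing
        # tf.keras.Model, so its weights appear in self.trainable_variables.
        model = Sequential()
        for _ in range(num_layers):
            model.add(Dense(hidden_dim))
            model.add(Activation(activation))
        model.add(Dense(out_dim))
        self.model = model

    def call(self, inputs):
        return self.model(inputs)

model = MLP(10, 1, 20, 4, 'relu')
optim = AdamOptimizer(learning_rate=1e-4)

z = tf.convert_to_tensor(np.random.randn(100, 10), dtype=tf.float32)
with tf.GradientTape() as tape:
    # No tape.watch needed: trainable variables are watched automatically,
    # and the first call builds the layers' weights.
    u = model(z)
    loss = tf.reduce_mean(tf.abs(u))
grad = tape.gradient(loss, model.trainable_variables)
optim.apply_gradients(zip(grad, model.trainable_variables))
print(loss.numpy())

Note that model.variables is empty until the model has been called once, since the Dense layers are only built on their first input; printing the variables before the forward pass, as in the question, will show nothing.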

answered 2019-05-26T09:42:20.830