
I am using Keras Tuner with RandomSearch() to hypertune my regression model. While I can hypertune with 'relu' and 'selu', I cannot do the same for Leaky ReLU. I understand that 'relu' and 'selu' work because string aliases are available for them, and no such string alias exists for Leaky ReLU. I tried passing a callable for Leaky ReLU (see the example below), but it does not seem to work. Could you tell me how to do this? I run into the same problem with Parametric Leaky ReLU.

Thanks in advance!

import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LeakyReLU

def build_model(hp):
    model = Sequential()
    model.add(
        Dense(
            units = 18,
            kernel_initializer = 'normal',
            activation = 'relu',
            input_shape = (18, )
        )
    )
    for i in range(hp.Int( name = "num_layers", min_value = 1, max_value = 5)):
        model.add(
            Dense(
                units = hp.Int(
                    name = "units_" + str(i),
                    min_value = 18,
                    max_value = 180,
                    step = 18),
                kernel_initializer = 'normal',
                activation = hp.Choice(
                    name = 'dense_activation',
                    # passing the LeakyReLU callable here is the part that fails
                    values = ['relu', 'selu', LeakyReLU(alpha=0.01)],
                    default = 'relu'
                )
            )
        )
    model.add( Dense( units = 1 ) )
    model.compile(
        optimizer = tf.keras.optimizers.Adam(
            hp.Choice(
                name = "learning_rate", values = [1e-2, 1e-3, 1e-4]
            )
        ),
        loss = 'mse'
    )
    return model
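
If I understand correctly, the underlying issue is that hp.Choice only accepts plain int, float, str, or bool values, so the LeakyReLU layer object is rejected. A stripped-down sketch of just that call (using keras_tuner directly; the second hyperparameter name is mine, added only to illustrate):

import keras_tuner as kt
from tensorflow.keras.layers import LeakyReLU

hp = kt.HyperParameters()
hp.Choice(name = 'dense_activation', values = ['relu', 'selu'])   # fine: all values are strings
hp.Choice(name = 'dense_activation_2',
          values = ['relu', 'selu', LeakyReLU(alpha=0.01)])       # errors out: Choice values must be int/float/str/bool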

2 Answers


As a workaround, you can add another activation function to the tf.keras.activations.* module by modifying its source file (activations.py).

Here is the code for tf.keras.activations.relu as you will find it in activations.py:

@keras_export('keras.activations.relu')
@dispatch.add_dispatch_support
def relu(x, alpha=0., max_value=None, threshold=0):
  """Applies the rectified linear unit activation function.

  With default values, this returns the standard ReLU activation:
  `max(x, 0)`, the element-wise maximum of 0 and the input tensor.

  Modifying default parameters allows you to use non-zero thresholds,
  change the max value of the activation,
  and to use a non-zero multiple of the input for values below the threshold.

  For example:

  >>> foo = tf.constant([-10, -5, 0.0, 5, 10], dtype = tf.float32)
  >>> tf.keras.activations.relu(foo).numpy()
  array([ 0.,  0.,  0.,  5., 10.], dtype=float32)
  >>> tf.keras.activations.relu(foo, alpha=0.5).numpy()
  array([-5. , -2.5,  0. ,  5. , 10. ], dtype=float32)
  >>> tf.keras.activations.relu(foo, max_value=5).numpy()
  array([0., 0., 0., 5., 5.], dtype=float32)
  >>> tf.keras.activations.relu(foo, threshold=5).numpy()
  array([-0., -0.,  0.,  0., 10.], dtype=float32)

  Arguments:
      x: Input `tensor` or `variable`.
      alpha: A `float` that governs the slope for values lower than the
        threshold.
      max_value: A `float` that sets the saturation threshold (the largest value
        the function will return).
      threshold: A `float` giving the threshold value of the activation function
        below which values will be damped or set to zero.

  Returns:
      A `Tensor` representing the input tensor,
      transformed by the relu activation function.
      Tensor will be of the same shape and dtype of input `x`.
  """
  return K.relu(x, alpha=alpha, max_value=max_value, threshold=threshold)

Copy this code and paste it right below the original. Change @keras_export('keras.activations.relu') to @keras_export('keras.activations.leaky_relu'), rename the function to leaky_relu, and change the default alpha to 0.2, for example:

@keras_export('keras.activations.leaky_relu')
@dispatch.add_dispatch_support
def leaky_relu(x, alpha=0.2, max_value=None, threshold=0):
  """Applies the leaky rectified linear unit activation function.

  Same implementation as `relu` above, but with a default `alpha` of 0.2,
  so values below the threshold are scaled by 0.2 instead of clipped to 0.

  Arguments:
      x: Input `tensor` or `variable`.
      alpha: A `float` that governs the slope for values lower than the
        threshold.
      max_value: A `float` that sets the saturation threshold (the largest value
        the function will return).
      threshold: A `float` giving the threshold value of the activation function
        below which values will be damped or set to zero.

  Returns:
      A `Tensor` representing the input tensor,
      transformed by the leaky relu activation function.
      Tensor will be of the same shape and dtype of input `x`.
  """
  return K.relu(x, alpha=alpha, max_value=max_value, threshold=threshold)

Now you can use 'leaky_relu' as a string alias (it is also exposed as keras.activations.leaky_relu).
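
For example, with the patched activations.py saved and Keras re-imported, the tuner choice from the question could list the new alias alongside the built-in ones (a minimal sketch under that assumption, not a tested drop-in):

# Sketch: inside build_model(hp), assuming the patched activations.py above
model.add(
    Dense(
        units = 18,
        kernel_initializer = 'normal',
        activation = hp.Choice(
            name = 'dense_activation',
            values = ['relu', 'selu', 'leaky_relu'],
            default = 'relu'
        )
    )
)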

answered 2021-05-21T08:08:23.207
# Custom activation function

from keras.layers import Activation, LeakyReLU
from keras import backend as K
from keras.utils.generic_utils import get_custom_objects


## Add leaky-relu so we can use it as a string

get_custom_objects().update({'leaky-relu': Activation(LeakyReLU(alpha=0.2))})

## Main activation functions available to use

activation_functions = ['sigmoid', 'relu', 'elu', 'leaky-relu', 'selu', 'gelu', 'swish']
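
With 'leaky-relu' registered as a custom object, here is a hedged sketch of how it could feed the question's tuner (this assumes the build_model(hp) from the question; I have not verified it across Keras / Keras Tuner versions):

# Sketch: with 'leaky-relu' registered, build_model(hp) can tune over the whole list
model.add(
    Dense(
        units = 18,
        kernel_initializer = 'normal',
        activation = hp.Choice(
            name = 'dense_activation',
            values = activation_functions,   # includes 'leaky-relu'
            default = 'relu'
        )
    )
)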
answered 2021-10-01T22:18:28.980