
Custom activation function in Tensorflow with trainable params

I am trying to implement a custom version of the PElu activation function in TensorFlow. The point of this activation is to smooth out the knee of the ReLU. I took the equation from this paper.
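For reference, this looks like the SMU (Smooth Maximum Unit) smoothing of a leaky ReLU; assuming that is the intended equation, it reads

f(x) = ((1 + α)·x + (1 − α)·x · erf(μ·(1 − α)·x)) / 2

where erf is the Gauss error function and μ controls how sharply the function bends at the knee. Note that the code below drops the division by 2.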

Here is the code:

from keras import backend as K
import tensorflow as tf

def SMU_LeakyPRElu(x, alpha=2.5,u=1.0):
    return ((1+alpha)*x)+((1-alpha)*x)*(tf.math.erf(u*(1-alpha)*x))

from keras.layers import Layer

class SMU_LeakyPRElu(Layer):

    def __init__(self, alpha=2.5, u=1.0, trainable=False, **kwargs):
        super(SMU_LeakyPRElu, self).__init__(**kwargs)
        self.supports_masking = True
        self.alpha = alpha
        self.u = u
        self.trainable = trainable

    def build(self, input_shape):
        self.alpha_factor = K.variable(self.alpha,
                                      dtype=K.floatx(),
                                      name='alpha_factor')
        self.u_factor = K.variable(self.u,
                                      dtype=K.floatx(),
                                      name='u_factor')
        if self.trainable:
            self._trainable_weights.append(self.alpha_factor)
            self._trainable_weights.append(self.u_factor)

        super(SMU_LeakyPRElu, self).build(input_shape)

    def call(self, inputs, mask=None):
        return SMU_LeakyPRElu(inputs, self.alpha_factor,self.u_factor)

    def get_config(self):
        config = {'alpha': self.get_weights()[0] if self.trainable else self.alpha,
                  'u' : self.get_weights()[1] if self.trainable else self.u,
                  'trainable': self.trainable}
        base_config = super(SMU_LeakyPRElu, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))

    def compute_output_shape(self, input_shape):
        return input_shape

x = tf.random.normal((1,10,4))
print(x)
input_shape = (1,10,4)

input_layer = tf.keras.layers.Input(shape=input_shape[1:], name="input_layer")
layer_1 = tf.keras.layers.Conv1D(2, 1,padding = 'valid', input_shape=input_shape[:1])(input_layer)
layer_2 = SMU_LeakyPRElu(alpha=2.5,u=1.0,trainable=True)(layer_1)

model = tf.keras.models.Model(input_layer, layer_2, name="model")

model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.0005), loss="categorical_crossentropy", run_eagerly=True)

print(model.summary())
result = model.predict(x)
print(result)
print(result.shape)

I implemented this code using the example from this post on Data Science SE.

The error:

tf.Tensor(
[[[ 1.0467066  -1.1833347   1.5384735   2.078511  ]
  [-1.6025988  -0.30846047  0.8019808   0.3113866 ]
  [ 0.58313304 -0.90643036 -0.3926888  -0.6210553 ]
  [ 0.16505387 -0.5930619   0.6983522  -0.12211661]
  [ 0.06077941 -0.11117186 -1.2540722  -0.32234746]
  [ 0.41838828  0.7090619   0.30999053  0.10459523]
  [ 0.35603598 -0.2695868  -0.17901018 -0.09100233]
  [ 1.2746769   0.8311447   0.02825974 -0.48021472]
  [-1.536545   -0.24765234 -0.36437735 -1.1891246 ]
  [ 0.7531206  -0.56109476 -0.65761757  0.19102335]]], shape=(1, 10, 4), dtype=float32)
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-50-c9d490dfd533> in <module>
      5 input_layer = tf.keras.layers.Input(shape=input_shape[1:], name="input_layer")
      6 layer_1 = tf.keras.layers.Conv1D(2, 1,padding = 'valid', input_shape=input_shape[:1])(input_layer)
----> 7 layer_2 = SMU_LeakyPRElu(alpha=2.5,u=1.0,trainable=True)(layer_1)
      8 
      9 model = tf.keras.models.Model(input_layer, layer_2, name="model")

1 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/type_spec.py in type_spec_from_value(value)
    888         3, "Failed to convert %r to tensor: %s" % (type(value).__name__, e))
    889 
--> 890   raise TypeError(f"Could not build a TypeSpec for {value} of "
    891                   f"unsupported type {type(value)}.")
    892 

TypeError: Could not build a TypeSpec for <__main__.SMU_LeakyPRElu object at 0x7fde698f7850> of unsupported type <class '__main__.SMU_LeakyPRElu'>.

I don't understand this error. How should I implement this function as a custom activation function with the trainable parameters alpha and u?

The problem is that you gave the activation function and the custom layer you created the same name. The class definition shadows the function, so calling SMU_LeakyPRElu inside call constructs another layer object rather than applying the activation, and Keras then fails to build a TypeSpec for that object, which is exactly the TypeError you see. I refactored your code for you.
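Here is a minimal sketch of that kind of name collision (hypothetical names, just to show the shadowing):

def f(x):              # a plain function
    return 2 * x

class f:               # this class definition rebinds the name `f`
    def __init__(self, x):
        self.x = x

y = f(3)               # now constructs a class instance, not 6
print(type(y))         # <class '__main__.f'>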

Code

import tensorflow as tf

from typing import Optional
from tensorflow.keras import Model
from tensorflow.keras.layers import Conv1D
from tensorflow.keras.layers import Input
from tensorflow.keras.layers import Layer
from tensorflow.keras.optimizers import Adam


class SMULeakyPReLU(Layer):
  """``SMULeakyPReLU``."""
  def __init__(self,
               alpha: float = 2.5,
               u: float = 1.,
               trainable: bool = False,
               **kwargs):
    super().__init__(**kwargs)
    self.alpha = alpha
    self.u = u
    self.trainable = trainable

  def build(self, input_shape: tf.TensorShape):
    super().build(input_shape)  
    self.alpha_factor = tf.Variable(
      self.alpha,
      dtype=tf.float32,
      trainable=self.trainable,
      name="alpha_factor")
    self.u_factor = tf.Variable(
      self.u,
      dtype=tf.float32,
      trainable=self.trainable,  # without this flag the variable defaults to trainable=True
      name="u_factor")

  def call(self,
           inputs: tf.Tensor,
           mask: Optional[tf.Tensor] = None
           ) -> tf.Tensor:
    # (1 + alpha) * x + (1 - alpha) * x * erf(u * (1 - alpha) * x),
    # matching the equation from the question (note: the terms are added, not multiplied)
    fst = (1. + self.alpha_factor) * inputs
    snd = (1. - self.alpha_factor) * inputs
    trd = tf.math.erf(self.u_factor * (1. - self.alpha_factor) * inputs)
    return fst + snd * trd
  
  def get_config(self):
    config = {
        "alpha": self.get_weights()[0] if self.trainable else self.alpha,
        "u": self.get_weights()[1] if self.trainable else self.u,
        "trainable": self.trainable
    }
    base_config = super().get_config()
    return dict(list(base_config.items()) + list(config.items()))
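As a side note, an arguably more idiomatic way to create these scalars is self.add_weight, which registers them with the layer so Keras tracks and saves them automatically. A minimal drop-in replacement for the build method above (same parameter names, assuming TF 2.x):

  def build(self, input_shape: tf.TensorShape):
    # add_weight registers the variables with the layer automatically
    self.alpha_factor = self.add_weight(
        name="alpha_factor",
        shape=(),
        dtype=tf.float32,
        initializer=tf.keras.initializers.Constant(self.alpha),
        trainable=self.trainable)
    self.u_factor = self.add_weight(
        name="u_factor",
        shape=(),
        dtype=tf.float32,
        initializer=tf.keras.initializers.Constant(self.u),
        trainable=self.trainable)
    super().build(input_shape)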

Test

# fake data
x = tf.random.normal((1, 10, 4))

# create network
input_layer = Input(shape=x.shape[1:], name="input_layer")
layer_1 = Conv1D(2, 1, padding="valid")(input_layer)
layer_2 = SMULeakyPReLU(alpha=2.5, u=1.0, trainable=True)(layer_1)

# create model
model = Model(input_layer, layer_2, name="model")

# compile model and summary
model.compile(
    optimizer=Adam(learning_rate=5e-4),
    loss="categorical_crossentropy",
    run_eagerly=True)
print(model.summary())

# forward pass
result = model.predict(x)
print(result)
print(result.shape)

# Model: "model"
# _________________________________________________________________
#  Layer (type)                Output Shape              Param #   
# =================================================================
#  input_layer (InputLayer)    [(None, 10, 4)]           0         
#                                                                  
#  conv1d_1 (Conv1D)           (None, 10, 2)             10        
#                                                                  
#  smu_leaky_p_re_lu_1 (SMULea  (None, 10, 2)            2         
#  kyPReLU)                                                        
#                                                                  
# =================================================================
# Total params: 12
# Trainable params: 12
# Non-trainable params: 0
# _________________________________________________________________
# None
# 1/1 [==============================] - 0s 13ms/step
# [[[-1.6503611e+01 -3.5051659e+01]
#   [ 4.0098205e-02  1.5923592e+00]
#   [-1.4898951e+00  7.5487376e-05]
#   [ 3.1900513e+01  2.8786476e+01]
#   [ 1.9207695e+01  3.6511238e+01]
#   [-6.8302655e-01 -4.7705490e-02]
#   [ 9.6008554e-03  7.5611029e+00]
#   [ 4.7136435e-01  2.5528276e+00]
#   [ 2.6859209e-01  3.3496175e+00]
#   [ 1.4372441e+01  3.4978668e+01]]]
# (1, 10, 2)
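To verify that alpha and u actually receive gradients, you can run a single training step and watch the two scalars move. A quick sketch (the dummy target and the MSE loss are assumptions made just for this check):

import numpy as np

# dummy target with the same shape as the model output
y_dummy = np.zeros((1, 10, 2), dtype="float32")

model.compile(optimizer=Adam(learning_rate=5e-4), loss="mse")

layer = model.layers[-1]
print("before:", layer.alpha_factor.numpy(), layer.u_factor.numpy())

model.fit(x, y_dummy, epochs=1, verbose=0)

print("after: ", layer.alpha_factor.numpy(), layer.u_factor.numpy())
# both values should change slightly, confirming the parameters train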
