from core.leras import nn
tf = nn.tf

class TLU(nn.LayerBase):
    """
    TensorFlow implementation of the Thresholded Linear Unit (TLU) from:

    Filter Response Normalization Layer: Eliminating Batch Dependence
    in the Training of Deep Neural Networks
    https://arxiv.org/pdf/1911.09737.pdf
    """
    def __init__(self, in_ch, dtype=None, **kwargs):
        self.in_ch = in_ch

        if dtype is None:
            dtype = nn.floatx
        self.dtype = dtype

        super().__init__(**kwargs)

    def build_weights(self):
        # Per-channel learnable threshold, initialized to zero.
        self.tau = tf.get_variable("tau", (self.in_ch,), dtype=self.dtype, initializer=tf.initializers.zeros())

    def get_weights(self):
        return [self.tau]

    def forward(self, x):
        # Broadcast tau across batch and spatial dimensions for the active data format.
        if nn.data_format == "NHWC":
            shape = (1, 1, 1, self.in_ch)
        else:
            shape = (1, self.in_ch, 1, 1)

        tau = tf.reshape(self.tau, shape)
        # TLU: elementwise max(x, tau).
        return tf.math.maximum(x, tau)

nn.TLU = TLU
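
# --- Usage sketch (illustrative only, not part of the original module) ---
# The layer computes y = max(x, tau), where tau is one learnable scalar per
# channel, initialized to zero. The snippet below is a minimal sketch assuming
# the usual leras flow of constructing the layer and building its weights
# before the first forward pass; the channel count, input tensor `x`, and the
# active nn.data_format are placeholders.
#
#   tlu = nn.TLU(in_ch=64)      # one tau per channel
#   tlu.build_weights()         # creates the tau variable
#   y = tlu.forward(x)          # x: NHWC or NCHW tensor with 64 channels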