From f044c99ddc158cd23d090826f22957bdffd0ee06 Mon Sep 17 00:00:00 2001
From: iperov
Date: Thu, 15 Jul 2021 00:50:11 +0400
Subject: [PATCH] remove ConvolutionAwareInitializer from layers, because
 lr-dropout does the same thing.

---
 core/leras/layers/Conv2D.py          | 4 ++--
 core/leras/layers/Conv2DTranspose.py | 4 ++--
 core/leras/layers/DepthwiseConv2D.py | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/core/leras/layers/Conv2D.py b/core/leras/layers/Conv2D.py
index 7d4d444..a5febf0 100644
--- a/core/leras/layers/Conv2D.py
+++ b/core/leras/layers/Conv2D.py
@@ -55,8 +55,8 @@ class Conv2D(nn.LayerBase):
             if kernel_initializer is None:
                 kernel_initializer = tf.initializers.random_normal(0, 1.0, dtype=self.dtype)
 
-        if kernel_initializer is None:
-            kernel_initializer = nn.initializers.ca()
+        #if kernel_initializer is None:
+        #    kernel_initializer = nn.initializers.ca()
 
         self.weight = tf.get_variable("weight", (self.kernel_size,self.kernel_size,self.in_ch,self.out_ch), dtype=self.dtype, initializer=kernel_initializer, trainable=self.trainable )
 
diff --git a/core/leras/layers/Conv2DTranspose.py b/core/leras/layers/Conv2DTranspose.py
index 937d624..a2e97dc 100644
--- a/core/leras/layers/Conv2DTranspose.py
+++ b/core/leras/layers/Conv2DTranspose.py
@@ -38,8 +38,8 @@ class Conv2DTranspose(nn.LayerBase):
             if kernel_initializer is None:
                 kernel_initializer = tf.initializers.random_normal(0, 1.0, dtype=self.dtype)
 
-        if kernel_initializer is None:
-            kernel_initializer = nn.initializers.ca()
+        #if kernel_initializer is None:
+        #    kernel_initializer = nn.initializers.ca()
         self.weight = tf.get_variable("weight", (self.kernel_size,self.kernel_size,self.out_ch,self.in_ch), dtype=self.dtype, initializer=kernel_initializer, trainable=self.trainable )
 
         if self.use_bias:
diff --git a/core/leras/layers/DepthwiseConv2D.py b/core/leras/layers/DepthwiseConv2D.py
index 98f3a0a..2916f01 100644
--- a/core/leras/layers/DepthwiseConv2D.py
+++ b/core/leras/layers/DepthwiseConv2D.py
@@ -68,8 +68,8 @@ class DepthwiseConv2D(nn.LayerBase):
             if kernel_initializer is None:
                 kernel_initializer = tf.initializers.random_normal(0, 1.0, dtype=self.dtype)
 
-        if kernel_initializer is None:
-            kernel_initializer = nn.initializers.ca()
+        #if kernel_initializer is None:
+        #    kernel_initializer = nn.initializers.ca()
 
         self.weight = tf.get_variable("weight", (self.kernel_size,self.kernel_size,self.in_ch,self.depth_multiplier), dtype=self.dtype, initializer=kernel_initializer, trainable=self.trainable )
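
Note: the patch only comments out the default fallback to the ConvolutionAware (CA) initializer; kernel_initializer remains a constructor argument on all three layers, so CA init can still be requested explicitly, e.g. for models trained without lr-dropout. A minimal sketch, assuming the usual leras setup and the constructor argument order (in_ch, out_ch, kernel_size) suggested by the weight shapes in the diff; the nn.initialize() call and the channel sizes are illustrative, not part of this patch:

    from core.leras import nn
    nn.initialize()  # assumed setup; real training scripts configure the device/config first

    # Default after this patch: kernel_initializer=None falls back to the
    # random_normal / wscale path instead of nn.initializers.ca().
    conv_default = nn.Conv2D(3, 64, kernel_size=3)

    # Opting back into the ConvolutionAware initializer explicitly.
    conv_ca = nn.Conv2D(3, 64, kernel_size=3, kernel_initializer=nn.initializers.ca())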