singa-commits mailing list archives

From: zhaoj...@apache.org
Subject: [1/5] incubator-singa git commit: SINGA-313 Add L2 norm layer
Date: Wed, 24 May 2017 12:12:20 GMT
Repository: incubator-singa
Updated Branches:
  refs/heads/master 2d1dd4290 -> 3415099a9


SINGA-313 Add L2 norm layer


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/0815391d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/0815391d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/0815391d

Branch: refs/heads/master
Commit: 0815391d0488acbe3f788d71457c28ec4b083dee
Parents: 2d1dd42
Author: wangwei <wangwei@comp.nus.edu.sg>
Authored: Mon May 22 11:20:42 2017 +0800
Committer: wangwei <wangwei@comp.nus.edu.sg>
Committed: Mon May 22 11:20:42 2017 +0800

----------------------------------------------------------------------
 python/singa/layer.py | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/0815391d/python/singa/layer.py
----------------------------------------------------------------------
diff --git a/python/singa/layer.py b/python/singa/layer.py
index caafef0..00b4763 100644
--- a/python/singa/layer.py
+++ b/python/singa/layer.py
@@ -544,6 +544,39 @@ class BatchNormalization(Layer):
             self.setup(input_sample_shape)
 
 
+class L2Norm(Layer):
+    '''Normalize each sample to have L2 norm = 1'''
+    def __init__(self, name, input_sample_shape, epsilon=1e-8):
+        super(L2Norm, self).__init__(name)
+        self.y = None
+        self.norm = None
+        self.name = name
+        self.epsilon = epsilon
+        self.out_sample_shape = input_sample_shape
+
+    def get_output_sample_shape(self):
+        return self.out_sample_shape
+
+    def forward(self, is_train, x):
+        norm = tensor.sum_columns(tensor.square(x))
+        norm += self.epsilon
+        norm = tensor.sqrt(norm)
+        self.y = x.clone()
+        self.y.div_column(norm)
+
+        if is_train:
+            self.norm = norm
+        return self.y
+
+    def backward(self, is_train, dy):
+        # (dy - y * k) / norm, k = sum(dy * y)
+        k = tensor.sum_columns(tensor.eltwise_mult(dy, self.y))
+        self.y.mult_column(k)
+        dx = dy - self.y
+        dx.div_column(self.norm)
+        return dx, []
+
+
 class LRN(Layer):
     """Local response normalization.
 


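A hedged usage sketch of the new layer through SINGA's Python API (the constructor and boolean flag follow the code above; tensor.Tensor, uniform and set_value are existing singa.tensor calls, but the session itself is illustrative, not taken from the commit):

from singa import tensor
from singa.layer import L2Norm

lyr = L2Norm('l2norm', input_sample_shape=(4,))
assert lyr.get_output_sample_shape() == (4,)

x = tensor.Tensor((3, 4))       # batch of 3 samples, 4 features each
x.uniform(-1, 1)
y = lyr.forward(True, x)        # flag=True caches the per-sample norm for backward
dy = tensor.Tensor((3, 4))
dy.set_value(1.0)               # stand-in upstream gradient of all ones
dx, _ = lyr.backward(True, dy)  # empty list: this layer has no parameters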