singa-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From m...@apache.org
Subject [02/14] incubator-singa git commit: SINGA-349 Create layer operations for autograd
Date Fri, 18 May 2018 04:52:08 GMT
SINGA-349 Create layer operations for autograd

1. cascade the newly generated layer operations as well as some existing operations like matmul,
softmax to test the compatibility.

2. test the autograd engine on these newly developed operations to confirm that they are
workable.


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/195b4d42
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/195b4d42
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/195b4d42

Branch: refs/heads/master
Commit: 195b4d42151b94d163d5c377c41246012a229bae
Parents: a9d495a
Author: xuewanqi <36396136+xuewanqi@users.noreply.github.com>
Authored: Mon Apr 30 11:55:07 2018 +0800
Committer: Wang Wei <dcswaw@nus.edu.sg>
Committed: Thu May 17 21:19:06 2018 +0800

----------------------------------------------------------------------
 python/singa/convolution_operation.py | 38 +++++++++++++++++++++++++++---
 1 file changed, 35 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/195b4d42/python/singa/convolution_operation.py
----------------------------------------------------------------------
diff --git a/python/singa/convolution_operation.py b/python/singa/convolution_operation.py
index 7c42dce..dcab2d4 100644
--- a/python/singa/convolution_operation.py
+++ b/python/singa/convolution_operation.py
@@ -1,6 +1,8 @@
 from singa import tensor
 from singa import layer
 from singa.proto import model_pb2
+from singa import autograd
+
 
 
 def ctensor2numpy(x):
@@ -64,8 +66,6 @@ z= layer_1._do_backward(y.data)
 a=ctensor2numpy(y.data)
 
 
-
-
 class MaxPooling2D(tensor.Operation):
     def __init__(self, name, kernel=3, stride=2, border_mode='same', pad=None,
                  data_format='NCHW', input_sample_shape=None):
@@ -123,7 +123,7 @@ class Flatten(tensor.Operation):
 
     def __call__(self, x):
         if not self.PyLayer.has_setup:
-            self.PyLayer.setup(x.shape)
+            self.PyLayer.setup(x.shape[1:])
         return self._do_forward(x)
 
     def forward(self, x):
@@ -140,6 +140,38 @@ z= layer_1._do_backward(y.data)
 a=ctensor2numpy(y.data)
 
 
+inputs=tensor.Tensor(shape=(10, 2, 3, 3), requires_grad=False, stores_grad=False)
+inputs.gaussian(1,0)
+
+x = Convolution2D('conv',4)(inputs)[0]
+print(x.shape)
+
+x = MaxPooling2D('pooling')(x)[0]
+print(x.shape)
+
+x = Activation('relu')(x)[0]
+print(x.shape)
+
+x = Flatten('flatten')(x)[0]
+print(x.shape)
+
+w0 = tensor.Tensor(shape=(4, 10), requires_grad=True, stores_grad=True)
+w0.gaussian(0.0, 0.1)
+x = tensor.matmul(x, w0)
+print(x.shape)
+
+x = tensor.softmax(x)
+
+target=tensor.Tensor(shape=(10, 10), requires_grad=False, stores_grad=False)
+target.gaussian(0.0 ,0.1)
+loss = tensor.cross_entropy(x, target)
+
+grad=autograd.backward(loss)
+print(grad)
+
+
+
+
 
 
 


Mime
View raw message