singa-commits mailing list archives

From kaip...@apache.org
Subject [1/3] incubator-singa git commit: SINGA-311 Add unittest for Adam updater
Date Tue, 11 Apr 2017 12:04:39 GMT
Repository: incubator-singa
Updated Branches:
  refs/heads/master 38da78914 -> b35e03f6f


SINGA-311 Add unittest for Adam updater

fix a bug in the Adam optimizer's step counting
add a Python unit test for Adam in test_optimizer.py
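
Background (inferred from the diff below, not part of the original commit message): the old code never recorded last_epoch/last_step after bumping self.t, so t grew once per parameter call instead of once per training iteration, which distorts Adam's bias correction. A minimal standalone sketch of the corrected counting behaviour (simplified; not the actual SINGA class):

    # Minimal sketch of the corrected step counting, simplified from the diff
    # below. The real Adam.apply_with_lr also performs the parameter update.
    class StepCounter(object):
        def __init__(self):
            self.t = 0            # number of distinct update iterations seen
            self.last_epoch = -1
            self.last_step = -1

        def tick(self, epoch, step):
            # advance t once per distinct (epoch, step) pair, regardless of
            # how many parameters are updated within that iteration
            if epoch != self.last_epoch or step != self.last_step:
                self.t += 1
                self.last_epoch = epoch
                self.last_step = step
            return self.t

    c = StepCounter()
    assert c.tick(0, 0) == 1   # first parameter of iteration 0
    assert c.tick(0, 0) == 1   # second parameter, same iteration: unchanged
    assert c.tick(0, 1) == 2   # next iteration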


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/4fd44ed2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/4fd44ed2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/4fd44ed2

Branch: refs/heads/master
Commit: 4fd44ed28309fa54c720cbd5783244fda645b7c4
Parents: 85dbad7
Author: wangwei <wangwei@comp.nus.edu.sg>
Authored: Tue Apr 11 18:21:15 2017 +0800
Committer: wangwei <wangwei@comp.nus.edu.sg>
Committed: Tue Apr 11 18:21:15 2017 +0800

----------------------------------------------------------------------
 python/singa/optimizer.py     |  8 +++++---
 test/python/test_optimizer.py | 42 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 47 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4fd44ed2/python/singa/optimizer.py
----------------------------------------------------------------------
diff --git a/python/singa/optimizer.py b/python/singa/optimizer.py
index 164921f..614fe6d 100644
--- a/python/singa/optimizer.py
+++ b/python/singa/optimizer.py
@@ -323,9 +323,9 @@ class Adam(Optimizer):
         self.epsilon = epsilon
         self.m = {}
         self.v = {}
-        self.t = 1
-        self.last_epoch = 0
-        self.last_step = 0
+        self.t = 0
+        self.last_epoch = -1
+        self.last_step = -1
 
     def apply_with_lr(self, epoch, lr, grad, value, name, step):
         '''Update one parameter object.
@@ -339,6 +339,8 @@ class Adam(Optimizer):
         assert step != -1, 'step should >= 0'
         if epoch != self.last_epoch or step != self.last_step:
             self.t += 1
+            self.last_step = step
+            self.last_epoch = epoch
         grad = self.apply_regularizer_constraint(epoch, value, grad, name, step)
         if name is not None and name in self.learning_rate_multiplier:
             lr = lr * self.learning_rate_multiplier[name]
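
For context (not part of the commit): self.t feeds the bias-correction terms of Adam's update, which is why it must count iterations rather than per-parameter calls. A minimal numpy sketch of one update, mirroring the np_adam helper added in the test below; the function name and out-of-place style are illustrative:

    import math
    import numpy as np

    def adam_step(p, g, m, v, lr, t, b1=0.9, b2=0.999, eps=1e-8):
        # exponential moving averages of the gradient and squared gradient
        m = b1 * m + (1 - b1) * g
        v = b2 * v + (1 - b2) * g * g
        # bias correction: t is the 1-based count of update iterations
        alpha = lr * math.sqrt(1. - b2 ** t) / (1. - b1 ** t)
        return p - alpha * m / (np.sqrt(v) + eps), m, v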

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4fd44ed2/test/python/test_optimizer.py
----------------------------------------------------------------------
diff --git a/test/python/test_optimizer.py b/test/python/test_optimizer.py
index 601aada..cfd13c0 100644
--- a/test/python/test_optimizer.py
+++ b/test/python/test_optimizer.py
@@ -16,6 +16,7 @@
 # under the License.
 # =============================================================================
 import unittest
+import math
 import numpy as np
 
 
@@ -28,6 +29,16 @@ if singa_wrap.USE_CUDA:
     cuda = device.create_cuda_gpu()
 
 
+def np_adam(plist, glist, mlist, vlist, lr, t, b1=0.9, b2=0.999):
+    for p, g, m, v in zip(plist, glist, mlist, vlist):
+        m *= b1
+        m += (1-b1) * g
+        v *= b2
+        v += (1-b2) * g * g
+        alpha = lr * math.sqrt(1. - math.pow(b2, t)) / (1. - math.pow(b1, t))
+        p -= alpha * m / (np.sqrt(v) + 1e-8)
+
+
 class TestOptimizer(unittest.TestCase):
 
     def setUp(self):
@@ -48,6 +59,37 @@ class TestOptimizer(unittest.TestCase):
         for i in range(self.W.size()):
             self.assertAlmostEqual(w[i], self.np_W[i] - lr * self.np_g[i])
 
+    def test_adam(self):
+        lr = 0.1
+        n, m = 4, 6
+        p1 = np.random.rand(n, m)
+        p2 = np.random.rand(n, m)
+        g1 = np.random.rand(n, m) * 0.01
+        g2 = np.random.rand(n, m) * 0.01
+        m1 = np.zeros((n, m))
+        m2 = np.zeros((n, m))
+        v1 = np.zeros((n, m))
+        v2 = np.zeros((n, m))
+        t1 = tensor.from_numpy(p1)
+        t2 = tensor.from_numpy(p2)
+        tg1 = tensor.from_numpy(g1)
+        tg2 = tensor.from_numpy(g2)
+
+        for t in range(1, 10):
+            np_adam([p1, p2], [g1, g2], [m1, m2], [v1, v2], lr, t)
+
+        adam = opt.Adam(lr=lr)
+        for t in range(1, 10):
+            adam.apply(0, tg1, t1, 'p1', t)
+            adam.apply(0, tg2, t2, 'p2', t)
+
+        t1 = tensor.to_numpy(t1)
+        t2 = tensor.to_numpy(t2)
+        for t, p in zip([t1, t2], [p1, p2]):
+            for i in range(n):
+                for j in range(m):
+                    self.assertAlmostEqual(t[i, j], p[i, j], 6)
+
     @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
     def test_sgd_cuda(self):
         lr = 0.1

