singa-dev mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From GitBox <...@apache.org>
Subject [GitHub] [incubator-singa] joddiy commented on a change in pull request #484: SINGA-475 add Div operator implementation to singa
Date Mon, 05 Aug 2019 08:18:44 GMT
joddiy commented on a change in pull request #484: SINGA-475 add Div operator implementation
to singa
URL: https://github.com/apache/incubator-singa/pull/484#discussion_r310486283
 
 

 ##########
 File path: test/python/test_operation.py
 ##########
 @@ -34,6 +34,35 @@
 singa.Gaussian(0.0, 1.0, dy)
 
 
def eval_numerical_gradient_b(f, x, y, reverse=False):
    """Numerically estimate the gradient of ``f(x, y)``.

    Uses a one-sided finite difference,
    grad[i] ~= (f(... + h ...) - f(x, y)) / h,
    perturbing one element at a time.

    Args:
        f: callable taking ``(x, y)`` and returning a scalar.
        x: numpy array, first argument of ``f``. Perturbed in place during
            evaluation and restored afterwards.
        y: numpy array, second argument of ``f``. Same in-place handling.
        reverse: if False (default) differentiate w.r.t. ``x``;
            if True, differentiate w.r.t. ``y``.

    Returns:
        numpy array of the same shape as the differentiated argument.
    """
    h = 0.00001
    # The argument we perturb: x by default, y when reverse=True.
    t = y if reverse else x
    # BUG FIX: the gradient must have the shape of the perturbed argument.
    # The original allocated np.zeros(x.shape) even when reverse=True, which
    # raises IndexError (or mis-sizes the result) when x and y have
    # different shapes — exactly the broadcasting case Div needs to test.
    grad = np.zeros(t.shape)
    fxy = f(x, y)  # baseline value, computed once outside the loop
    it = np.nditer(t, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        idx = it.multi_index
        old_value = t[idx]
        t[idx] = old_value + h        # increment one element by h
        fth = f(x, y)                 # evaluate f with the perturbation
        t[idx] = old_value            # restore previous value (very important!)
        grad[idx] = (fth - fxy) / h   # the one-sided slope
        it.iternext()                 # step to next element
    return grad
+
 
 Review comment:
   Can we optimize this part as follows?
   ```
   def eval_numerical_gradient_b(f, x, y, reverse = False):
       # One-sided finite-difference gradient of f(x, y) w.r.t. x
       # (default) or w.r.t. y when reverse=True. f must return a scalar;
       # the perturbed array is modified in place and restored.
       h = 0.00001
       t = y if reverse else x
       # BUG FIX: allocate with t.shape, not x.shape — np.zeros(x.shape)
       # is wrong when reverse=True and x, y have different shapes.
       grad = np.zeros(t.shape)
       fx = f(x, y)
       it = np.nditer(t, flags=['multi_index'], op_flags=['readwrite'])
       while not it.finished:
           _it = it.multi_index
           old_value = t[_it]
           t[_it] = old_value + h # increment by h
           fth = f(x, y) # evaluate f with t perturbed by h
           t[_it] = old_value # restore to previous value (very important!)
           grad[_it] = (fth - fx) / h # the slope
           it.iternext() # step to next dimension
       return grad
   ```

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

Mime
View raw message