singa-dev mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From GitBox <...@apache.org>
Subject [GitHub] [incubator-singa] xuewanqi commented on a change in pull request #468: Distributted module
Date Mon, 15 Jul 2019 06:35:20 GMT
xuewanqi commented on a change in pull request #468: Distributted module
URL: https://github.com/apache/incubator-singa/pull/468#discussion_r303299513
 
 

 ##########
 File path: python/singa/dist_opt.py
 ##########
 @@ -1,27 +1,32 @@
 from . import singa_wrap as singa
+from .opt import SGD
 
-class Dist_SGD(object):
-	def __init__(self, lr=0.01, nDev=1):
-		self.lr=lr
-		# def start_MPI():
-		# 	pass
-		# def create_communicator():
-		# 	pass
-			# could be combined with start_MPI
-		self.communicator=singa.Communicator(nDev)
-		self.world_size=self.communicator.totalMPIRanksInGlobal
-		self.rank_in_local=self.communicator.MPIRankInLocal
-		self.rank_in_global=self.communicator.MPIRankInGlobal
 
-	def dist_update(self, param, grad):
-		# singa.synch(grad.data, self.communicator)
-		# grad /= self.communicator.totalMPIRanksInGlobal
-		grad = self.synch(grad)
-		param -= grad * self.lr 
+class DistOpt(object):
 
-	def synch(self, tensor):
-		singa.synch(tensor.data, self.communicator)
-		tensor /= self.world_size
-		return tensor
-	
+    def __init__(self, opt=SGD(), nDev=1):
+    # The class is designed to wrap an optimizer to do distributed training.
+    # opt: The optimizer to be wrapped. nDev: number of devices (GPUs) a
+    # process will control/use.
 
+        # world_size: total number of processes.
+        # rank_in_local: local rank of a process on the current node.
+    # rank_in_global: global rank of a process.
+
+        self.opt = opt
+        self.communicator = singa.Communicator(nDev)
+        self.world_size = self.communicator.totalMPIRanksInGlobal
+        self.rank_in_local = self.communicator.MPIRankInLocal
+        self.rank_in_global = self.communicator.MPIRankInGlobal
+
+    def update(self, param, grad):
+        # singa.synch(grad.data, self.communicator)
+        # grad /= self.communicator.totalMPIRanksInGlobal
+        grad = self.synch(grad)
 
 Review comment:
   ok, have changed the name

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

Mime
View raw message