singa-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From wang...@apache.org
Subject [1/2] incubator-singa git commit: SINGA-18 update API for displaying performance metric Update the API of TrainOneBatch and TestOneBatch to support different ways of displaying performance metric
Date Thu, 18 Jun 2015 11:34:12 GMT
Repository: incubator-singa
Updated Branches:
  refs/heads/master 81f7a12eb -> a32102e64


SINGA-18 update API for displaying performance metric
Update the API of TrainOneBatch and TestOneBatch to support different ways of displaying performance
metric


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/f1c84912
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/f1c84912
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/f1c84912

Branch: refs/heads/master
Commit: f1c84912bbba4c8692e6cf5f1d3d574e4469a61d
Parents: 51a924d
Author: zhaojing <zhaojing@comp.nus.edu.sg>
Authored: Wed Jun 17 23:35:05 2015 +0800
Committer: zhaojing <zhaojing@comp.nus.edu.sg>
Committed: Wed Jun 17 23:35:05 2015 +0800

----------------------------------------------------------------------
 include/trainer/worker.h |  8 +++---
 src/trainer/worker.cc    | 60 +++++++++++++++++++++----------------------
 2 files changed, 34 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/f1c84912/include/trainer/worker.h
----------------------------------------------------------------------
diff --git a/include/trainer/worker.h b/include/trainer/worker.h
index a4054cb..04a68ea 100644
--- a/include/trainer/worker.h
+++ b/include/trainer/worker.h
@@ -45,11 +45,11 @@ class Worker {
     * Train one mini-batch.
     * Test/Validation is done before training.
     */
-  virtual void TrainOneBatch(int step)=0;
+  virtual void TrainOneBatch(int step, Metric* perf)=0;
   /**
    * Test/validate one mini-batch.
    */
-  virtual void TestOneBatch(int step, Phase phase, shared_ptr<NeuralNet> net)=0;
+  virtual void TestOneBatch(int step, Phase phase, shared_ptr<NeuralNet> net, Metric* perf)=0;
   /**
    * Test the performance of the learned model on validation or test dataset.
     * Test is done by the first group.
@@ -142,8 +142,8 @@ class BPWorker: public Worker{
  public:
   BPWorker(int thread_id, int group_id, int worker_id);
   ~BPWorker(){}
-  virtual void TrainOneBatch(int step);
-  virtual void TestOneBatch(int step, Phase phase, shared_ptr<NeuralNet> net);
+  virtual void TrainOneBatch(int step, Metric* perf);
+  virtual void TestOneBatch(int step, Phase phase, shared_ptr<NeuralNet> net, Metric* perf);
   void Forward(int step, Phase phase, shared_ptr<NeuralNet> net);
   void Backward(int step, shared_ptr<NeuralNet> net);
 };

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/f1c84912/src/trainer/worker.cc
----------------------------------------------------------------------
diff --git a/src/trainer/worker.cc b/src/trainer/worker.cc
index b308c4e..f047f0f 100644
--- a/src/trainer/worker.cc
+++ b/src/trainer/worker.cc
@@ -169,22 +169,9 @@ void Worker::RunOneBatch(int step, Metric* perf){
     CollectAll(test_net_, step);
     Test(modelproto_.test_steps(), kTest, test_net_);
   }
-  TrainOneBatch(step);
+  TrainOneBatch(step, perf);
   //LOG(ERROR)<<"Train "<<step;
   if(perf!=nullptr){
-    auto losslayers=train_net_->losslayers();
-    for(auto layer: losslayers){
-      if(layer->partitionid()==worker_id_){
-        const float * ptr=layer->metric().cpu_data();
-        /*
-        for(int j=0;j<layer->metric().count();j++)
-          perf->AddMetric(std::to_string(j)+"#"+layer->name(), ptr[j]);
-        */
-        // hard code display info
-        perf->AddMetric(std::to_string(0)+"#loss", ptr[0]);
-        perf->AddMetric(std::to_string(1)+"#accuracy", ptr[1]);
-      }
-    }
     perf->Inc();
     if(DisplayNow(step)){
       perf->Avg();
@@ -206,22 +193,9 @@ void Worker::SendBlob(){
 }
 
 void Worker::Test(int nsteps, Phase phase, shared_ptr<NeuralNet> net){
-  const auto& losslayers=net->losslayers();
   Metric perf;
   for(int step=0;step<nsteps;step++){
-    TestOneBatch(step, phase, net);
-    for(auto layer: losslayers){
-      if(layer->partitionid()==worker_id_){
-        const float * ptr=layer->metric().cpu_data();
-        /*
-        for(int j=0;j<layer->metric().count();j++)
-          perf.AddMetric(std::to_string(j)+"#"+layer->name(), ptr[j]);
-        */
-        // hard code display info
-        perf.AddMetric(std::to_string(0)+"#loss", ptr[0]);
-        perf.AddMetric(std::to_string(1)+"#accuracy", ptr[1]);
-      }
-    }
+    TestOneBatch(step, phase, net, &perf);
     perf.Inc();
   }
   perf.Avg();
@@ -313,13 +287,39 @@ void BPWorker::Backward(int step, shared_ptr<NeuralNet> net){
   }
 }
 
-void BPWorker::TrainOneBatch(int step){
+void BPWorker::TrainOneBatch(int step, Metric* perf){
   Forward(step, kTrain, train_net_);
   Backward(step, train_net_);
+  auto losslayers=train_net_->losslayers();
+  for(auto layer: losslayers){
+      if(layer->partitionid()==worker_id_){
+        const float * ptr=layer->metric().cpu_data();
+        /*
+        for(int j=0;j<layer->metric().count();j++)
+          perf->AddMetric(std::to_string(j)+"#"+layer->name(), ptr[j]);
+        */
+        // hard code display info
+        perf->AddMetric(std::to_string(0)+"#loss", ptr[0]);
+        perf->AddMetric(std::to_string(1)+"#accuracy", ptr[1]);
+      }
+    }
 }
 
-void BPWorker::TestOneBatch(int step, Phase phase, shared_ptr<NeuralNet> net){
+void BPWorker::TestOneBatch(int step, Phase phase, shared_ptr<NeuralNet> net, Metric* perf){
   Forward(step, phase, net);
+  const auto& losslayers=net->losslayers();
+  for(auto layer: losslayers){
+      if(layer->partitionid()==worker_id_){
+        const float * ptr=layer->metric().cpu_data();
+        /*
+        for(int j=0;j<layer->metric().count();j++)
+          perf.AddMetric(std::to_string(j)+"#"+layer->name(), ptr[j]);
+        */
+        // hard code display info
+        perf->AddMetric(std::to_string(0)+"#loss", ptr[0]);
+        perf->AddMetric(std::to_string(1)+"#accuracy", ptr[1]);
+      }
+    }
 }
 
 }  // namespace singa


Mime
View raw message