horn-dev mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From zjaf...@apache.org
Subject [3/4] incubator-horn git commit: HORN-10 implemented first iteration of convNeuron and standard neuron
Date Wed, 08 Jun 2016 04:42:23 GMT
HORN-10 implemented first iteration of convNeuron and standard neuron


Project: http://git-wip-us.apache.org/repos/asf/incubator-horn/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-horn/commit/23d2b5c3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-horn/tree/23d2b5c3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-horn/diff/23d2b5c3

Branch: refs/heads/master
Commit: 23d2b5c39e5d243403c4d59f773fdc0ac49774bb
Parents: 99908f9
Author: Zachary Jaffee <zij@case.edu>
Authored: Tue Jun 7 21:20:23 2016 -0700
Committer: Zachary Jaffee <zij@case.edu>
Committed: Tue Jun 7 21:20:23 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/horn/core/ConvNeuron.java   | 68 +++++++++++++++
 .../org/apache/horn/core/StandardNeuron.java    | 68 +++++++++++++++
 .../examples/ConvolutionalNeuralNetwork.java    | 83 ++++++++++++++++++
 .../org/apache/horn/core/TestConvNeuron.java    | 89 ++++++++++++++++++++
 4 files changed, 308 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/23d2b5c3/src/main/java/org/apache/horn/core/ConvNeuron.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/horn/core/ConvNeuron.java b/src/main/java/org/apache/horn/core/ConvNeuron.java
new file mode 100644
index 0000000..2e340f8
--- /dev/null
+++ b/src/main/java/org/apache/horn/core/ConvNeuron.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.examples;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hama.HamaConfiguration;
+import org.apache.horn.core.HornJob;
+import org.apache.horn.core.Neuron;
+import org.apache.horn.core.Synapse;
+import org.apache.horn.funcs.CrossEntropy;
+import org.apache.horn.funcs.Sigmoid;
+
+public class ConvNeuron extends Neuron<Synapse<FloatWritable, FloatWritable>>
{
+  private double learningRate;
+  private double momentum;
+
+  @Override
+  public void setup(HamaConfiguration conf) {
+    this.learningRate = conf.getDouble("mlp.learning.rate", 0.5);
+    this.momentum = conf.getDouble("mlp.momentum.weight", 0.2);
+  }
+
+  @Override
+  public void forward(
+      Iterable<Synapse<FloatWritable, FloatWritable>> messages)
+      throws IOException {
+    double sum = 0;
+    for (Synapse<FloatWritable, FloatWritable> m : messages) {
+      sum += m.getInput() * m.getWeight();
+    }
+
+    this.feedforward(this.squashingFunction.apply(sum));
+  }
+
+  @Override
+  public void backward(
+      Iterable<Synapse<FloatWritable, FloatWritable>> messages)
+      throws IOException {
+    for (Synapse<FloatWritable, FloatWritable> m : messages) {
+      // Calculates error gradient for each neuron
+      double gradient = this.squashingFunction.applyDerivative(this
+          .getOutput()) * (m.getDelta() * m.getWeight());
+      this.backpropagate(m);
+
+      // Weight corrections
+      double weight = -learningRate * this.getOutput() * m.getDelta()
+          + momentum * m.getPrevWeight();
+      this.push(weight);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/23d2b5c3/src/main/java/org/apache/horn/core/StandardNeuron.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/horn/core/StandardNeuron.java b/src/main/java/org/apache/horn/core/StandardNeuron.java
new file mode 100644
index 0000000..29d3655
--- /dev/null
+++ b/src/main/java/org/apache/horn/core/StandardNeuron.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.examples;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hama.HamaConfiguration;
+import org.apache.horn.core.HornJob;
+import org.apache.horn.core.Neuron;
+import org.apache.horn.core.Synapse;
+import org.apache.horn.funcs.CrossEntropy;
+import org.apache.horn.funcs.Sigmoid;
+
+public class StandardNeuron extends Neuron<Synapse<FloatWritable, FloatWritable>>
{
+  private double learningRate;
+  private double momentum;
+
+  @Override
+  public void setup(HamaConfiguration conf) {
+    this.learningRate = conf.getDouble("mlp.learning.rate", 0.5);
+    this.momentum = conf.getDouble("mlp.momentum.weight", 0.2);
+  }
+
+  @Override
+  public void forward(
+      Iterable<Synapse<FloatWritable, FloatWritable>> messages)
+      throws IOException {
+    double sum = 0;
+    for (Synapse<FloatWritable, FloatWritable> m : messages) {
+      sum += m.getInput() * m.getWeight();
+    }
+
+    this.feedforward(this.squashingFunction.apply(sum));
+  }
+
+  @Override
+  public void backward(
+      Iterable<Synapse<FloatWritable, FloatWritable>> messages)
+      throws IOException {
+    for (Synapse<FloatWritable, FloatWritable> m : messages) {
+      // Calculates error gradient for each neuron
+      double gradient = this.squashingFunction.applyDerivative(this
+          .getOutput()) * (m.getDelta() * m.getWeight());
+      this.backpropagate(gradient);
+
+      // Weight corrections
+      double weight = -learningRate * this.getOutput() * m.getDelta()
+          + momentum * m.getPrevWeight();
+      this.push(weight);
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/23d2b5c3/src/main/java/org/apache/horn/examples/ConvolutionalNeuralNetwork.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/horn/examples/ConvolutionalNeuralNetwork.java b/src/main/java/org/apache/horn/examples/ConvolutionalNeuralNetwork.java
new file mode 100644
index 0000000..18f3391
--- /dev/null
+++ b/src/main/java/org/apache/horn/examples/ConvolutionalNeuralNetwork.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.examples;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hama.HamaConfiguration;
+import org.apache.horn.core.HornJob;
+import org.apache.horn.core.Neuron;
+import org.apache.horn.core.StandardNeuron;
+import org.apache.horn.core.ConvNeuron;
+import org.apache.horn.core.Synapse;
+import org.apache.horn.funcs.CrossEntropy;
+import org.apache.horn.funcs.Sigmoid;
+
+public class ConvolutionalNeuralNetwork {
+
+  //possible way to pass classes is have a list of tuples to pass the neuron and squashing
funciton value at the same time
+  public static CNNHornJob createJob(HamaConfiguration conf, String modelPath,
+      String inputPath, double learningRate, double momemtumWeight,
+      double regularizationWeight, int features, int labels, int maxIteration,
+      int numOfTasks) throws IOException {
+
+    CNNHornJob job = new CNNHornJob(conf, ConvolutionalNeuralNetwork.class);
+    job.setTrainingSetPath(inputPath);
+    job.setModelPath(modelPath);
+
+    job.setNumBspTask(numOfTasks);
+    job.setMaxIteration(maxIteration);
+    job.setLearningRate(learningRate);
+    job.setMomentumWeight(momemtumWeight);
+    job.setRegularizationWeight(regularizationWeight);
+
+    job.setConvergenceCheckInterval(1000);
+    job.setBatchSize(300);
+
+    cnn.addLayer(150, ReLu.class, ConvNeuron.class); // convolution layer
+    ccn.addLayer(100, Sigmoid.class, StandardNeuron.class); // fully connected
+    ccn.addLayer(100, Sigmoid.class, StandardNeuron.class); // fully connected
+    ccn.outputLayer(10, Sigmoid.class, StandardNeuron.class); // fully connected
+
+    job.setCostFunction(CrossEntropy.class);
+
+    return job;
+  }
+
+  public static void main(String[] args) throws IOException,
+      InterruptedException, ClassNotFoundException {
+    //TODO: implement this for real
+    if (args.length < 9) {
+      System.out
+          .println("Usage: model_path training_set learning_rate momentum regularization_weight
feature_dimension label_dimension max_iteration num_tasks");
+      System.exit(1);
+    }
+    HornJob ann = createJob(new HamaConfiguration(), args[0], args[1],
+        Double.parseDouble(args[2]), Double.parseDouble(args[3]),
+        Double.parseDouble(args[4]), Integer.parseInt(args[5]),
+        Integer.parseInt(args[6]), Integer.parseInt(args[7]),
+        Integer.parseInt(args[8]));
+
+    long startTime = System.currentTimeMillis();
+    if (ann.waitForCompletion(true)) {
+      System.out.println("Job Finished in "
+          + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-horn/blob/23d2b5c3/src/test/java/org/apache/horn/core/TestConvNeuron.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/horn/core/TestConvNeuron.java b/src/test/java/org/apache/horn/core/TestConvNeuron.java
new file mode 100644
index 0000000..28c921c
--- /dev/null
+++ b/src/test/java/org/apache/horn/core/TestConvNeuron.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.core;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.horn.funcs.CrossEntropy;
+import org.apache.horn.funcs.Sigmoid;
+
+public class TestConvNeuron extends TestCase {
+  private static float learningrate = 0.1f;
+  private static float bias = -1;
+  private static float theta = 0.8f;
+
+  public static class MyConvNeuron extends
+      ConvNeuron<Synapse<FloatWritable, FloatWritable>> {
+
+    @Override
+    public void forward(
+        Iterable<Synapse<FloatWritable, FloatWritable>> messages)
+        throws IOException {
+      float sum = 0;
+      for (Synapse<FloatWritable, FloatWritable> m : messages) {
+        sum += m.getInput() * m.getWeight();
+      }
+      sum += (bias * theta);
+      System.out.println(new CrossEntropy().apply(0.000001f, 1.0f));
+      this.feedforward(new Sigmoid().apply(sum));
+    }
+    
+    @Override
+    public void backward(
+        Iterable<Synapse<FloatWritable, FloatWritable>> messages)
+        throws IOException {
+      for (Synapse<FloatWritable, FloatWritable> m : messages) {
+        // Calculates error gradient for each neuron
+        float gradient = new Sigmoid().applyDerivative(this.getOutput())
+            * (m.getDelta() * m.getWeight());
+
+        // Propagates to lower layer
+        backpropagate(gradient);
+
+        // Weight corrections
+        float weight = learningrate * this.getOutput() * m.getDelta();
+        assertEquals(-0.006688235f, weight);
+        // this.push(weight);
+      }
+    }
+
+  }
+
+  public void testProp() throws IOException {
+    List<Synapse<FloatWritable, FloatWritable>> x = new ArrayList<Synapse<FloatWritable,
FloatWritable>>();
+    x.add(new Synapse<FloatWritable, FloatWritable>(new FloatWritable(1.0f),
+        new FloatWritable(0.5f)));
+    x.add(new Synapse<FloatWritable, FloatWritable>(new FloatWritable(1.0f),
+        new FloatWritable(0.4f)));
+
+    MyConvNeuron n = new MyConvNeuron();
+    n.forward(x);
+    assertEquals(0.5249792f, n.getOutput());
+
+    x.clear();
+    x.add(new Synapse<FloatWritable, FloatWritable>(new FloatWritable(
+        -0.1274f), new FloatWritable(-1.2f)));
+    n.backward(x);
+  }
+
+}


Mime
View raw message