ignite-commits mailing list archives

From ch...@apache.org
Subject [4/4] ignite git commit: IGNITE-7322: Return authorship of 7322 and 6783, step 2.
Date Tue, 09 Jan 2018 19:14:41 GMT
IGNITE-7322: Return authorship of 7322 and 6783, step 2.

this closes #3345


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/4e24927e
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/4e24927e
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/4e24927e

Branch: refs/heads/master
Commit: 4e24927ee7492ddb05ea1c55cdf9c8dc3506506f
Parents: cb20a2b
Author: artemmalykh <amalykh@gridgain.com>
Authored: Tue Jan 9 22:14:26 2018 +0300
Committer: Yury Babak <ybabak@gridgain.com>
Committed: Tue Jan 9 22:14:26 2018 +0300

----------------------------------------------------------------------
 .../examples/ml/nn/MLPGroupTrainerExample.java  | 141 ++++++++
 .../examples/ml/nn/MLPLocalTrainerExample.java  | 161 +++++++++
 .../ignite/examples/ml/nn/package-info.java     |  22 ++
 .../org/apache/ignite/ml/math/Isomorphism.java  |  69 ++++
 .../org/apache/ignite/ml/math/VectorUtils.java  |  20 +
 .../ignite/ml/math/functions/Functions.java     |  66 +++-
 .../functions/IgniteCurriedTriFunction.java     |  28 ++
 .../ignite/ml/nn/LabeledVectorsCache.java       |  63 ++++
 .../ignite/ml/nn/LocalBatchTrainerInput.java    |   7 +-
 .../org/apache/ignite/ml/nn/LossFunctions.java  |  28 +-
 .../ml/nn/MLPGroupUpdateTrainerCacheInput.java  | 132 +++++++
 .../java/org/apache/ignite/ml/nn/MLPLayer.java  |   3 +-
 .../ignite/ml/nn/MultilayerPerceptron.java      |  16 +-
 .../ml/nn/architecture/LayerArchitecture.java   |   5 +-
 .../ml/nn/architecture/MLPArchitecture.java     |   9 +-
 .../ml/nn/initializers/RandomInitializer.java   |   2 +-
 .../AbstractMLPGroupUpdateTrainerInput.java     |  60 +++
 .../ml/nn/trainers/distributed/MLPCache.java    |  91 +++++
 .../distributed/MLPGroupTrainingCacheValue.java |  48 +++
 .../distributed/MLPGroupUpdateTrainer.java      | 362 +++++++++++++++++++
 .../MLPGroupUpdateTrainerDataCache.java         |  77 ++++
 .../MLPGroupUpdateTrainerLocalContext.java      | 117 ++++++
 .../MLPGroupUpdateTrainingContext.java          |  64 ++++
 .../distributed/MLPGroupUpdateTrainingData.java |  88 +++++
 .../MLPGroupUpdateTrainingLoopData.java         | 116 ++++++
 .../trainers/distributed/MLPMetaoptimizer.java  |  75 ++++
 .../ml/nn/trainers/local/LocalBatchTrainer.java |  19 +-
 .../nn/trainers/local/MLPLocalBatchTrainer.java |  17 +-
 .../ml/nn/updaters/BaseSmoothParametrized.java  |   6 +-
 .../ml/nn/updaters/NesterovParameterUpdate.java |  94 +++++
 .../nn/updaters/NesterovUpdateCalculator.java   |  85 +++++
 .../ignite/ml/nn/updaters/NesterovUpdater.java  |  76 ----
 .../ml/nn/updaters/NesterovUpdaterParams.java   |  67 ----
 .../nn/updaters/ParameterUpdateCalculator.java  |  58 +++
 .../ignite/ml/nn/updaters/ParameterUpdater.java |  51 ---
 .../ml/nn/updaters/RPropParameterUpdate.java    | 228 ++++++++++++
 .../ml/nn/updaters/RPropUpdateCalculator.java   | 151 ++++++++
 .../ignite/ml/nn/updaters/RPropUpdater.java     | 148 --------
 .../ml/nn/updaters/RPropUpdaterParams.java      | 135 -------
 .../ml/nn/updaters/SimpleGDParameter.java       |  77 ++++
 .../ignite/ml/nn/updaters/SimpleGDParams.java   |  65 ----
 .../nn/updaters/SimpleGDUpdateCalculator.java   |  66 ++++
 .../ignite/ml/nn/updaters/SimpleGDUpdater.java  |  60 ---
 .../ml/nn/updaters/SmoothParametrized.java      |   5 +-
 .../ignite/ml/nn/updaters/UpdaterParams.java    |  32 --
 .../org/apache/ignite/ml/trainers/Trainer.java  |  30 ++
 .../trainers/group/BaseLocalProcessorJob.java   | 145 ++++++++
 .../ignite/ml/trainers/group/ConstModel.java    |  46 +++
 .../ignite/ml/trainers/group/GroupTrainer.java  | 208 +++++++++++
 .../group/GroupTrainerBaseProcessorTask.java    | 143 ++++++++
 .../ml/trainers/group/GroupTrainerCacheKey.java | 125 +++++++
 .../group/GroupTrainerEntriesProcessorTask.java |  64 ++++
 .../ml/trainers/group/GroupTrainerInput.java    |  37 ++
 .../group/GroupTrainerKeysProcessorTask.java    |  62 ++++
 .../ml/trainers/group/GroupTrainingContext.java |  98 +++++
 .../group/LocalEntriesProcessorJob.java         |  85 +++++
 .../trainers/group/LocalKeysProcessorJob.java   |  78 ++++
 .../ignite/ml/trainers/group/Metaoptimizer.java |  93 +++++
 .../group/MetaoptimizerDistributedStep.java     |  88 +++++
 .../group/MetaoptimizerGroupTrainer.java        | 132 +++++++
 .../ml/trainers/group/ResultAndUpdates.java     | 177 +++++++++
 .../ignite/ml/trainers/group/chain/Chains.java  |  56 +++
 .../trainers/group/chain/ComputationsChain.java | 246 +++++++++++++
 .../chain/DistributedEntryProcessingStep.java   |  34 ++
 .../chain/DistributedKeyProcessingStep.java     |  33 ++
 .../trainers/group/chain/DistributedStep.java   |  70 ++++
 .../trainers/group/chain/EntryAndContext.java   |  70 ++++
 .../trainers/group/chain/HasTrainingUUID.java   |  32 ++
 .../ml/trainers/group/chain/KeyAndContext.java  |  67 ++++
 .../ml/trainers/group/chain/package-info.java   |  22 ++
 .../ignite/ml/trainers/group/package-info.java  |  22 ++
 .../apache/ignite/ml/trainers/package-info.java |  22 ++
 .../org/apache/ignite/ml/util/MnistUtils.java   |   8 +-
 .../org/apache/ignite/ml/IgniteMLTestSuite.java |   4 +-
 .../java/org/apache/ignite/ml/TestUtils.java    |  11 +
 .../ignite/ml/nn/MLPConstInitializer.java       |  10 +-
 .../ignite/ml/nn/MLPGroupTrainerTest.java       | 126 +++++++
 .../ignite/ml/nn/MLPLocalTrainerTest.java       |  23 +-
 .../org/apache/ignite/ml/nn/MLPTestSuite.java   |   1 +
 .../ml/nn/SimpleMLPLocalBatchTrainerInput.java  |  29 +-
 .../apache/ignite/ml/nn/performance/Mnist.java  | 140 -------
 .../ml/nn/performance/MnistDistributed.java     | 150 ++++++++
 .../ignite/ml/nn/performance/MnistLocal.java    |  93 +++++
 .../ml/nn/performance/MnistMLPTestUtil.java     |  88 +++++
 .../group/DistributedWorkersChainTest.java      | 189 ++++++++++
 .../ml/trainers/group/GroupTrainerTest.java     |  90 +++++
 .../trainers/group/SimpleGroupTrainerInput.java |  63 ++++
 .../ml/trainers/group/TestGroupTrainer.java     | 144 ++++++++
 .../group/TestGroupTrainerLocalContext.java     |  85 +++++
 .../trainers/group/TestGroupTrainingCache.java  |  70 ++++
 .../group/TestGroupTrainingSecondCache.java     |  56 +++
 .../ml/trainers/group/TestLocalContext.java     |  51 +++
 .../ml/trainers/group/TestTrainingLoopStep.java |  65 ++++
 .../trainers/group/TrainersGroupTestSuite.java  |  32 ++
 .../IgniteCacheRandomOperationBenchmark.java    |   2 +-
 95 files changed, 6258 insertions(+), 867 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPGroupTrainerExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPGroupTrainerExample.java b/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPGroupTrainerExample.java
new file mode 100644
index 0000000..8d4a151
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPGroupTrainerExample.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.nn;
+
+import java.util.Random;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.IgniteDataStreamer;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.examples.ExampleNodeStartup;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.StorageConstants;
+import org.apache.ignite.ml.math.Tracer;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix;
+import org.apache.ignite.ml.nn.Activators;
+import org.apache.ignite.ml.nn.LabeledVectorsCache;
+import org.apache.ignite.ml.nn.MLPGroupUpdateTrainerCacheInput;
+import org.apache.ignite.ml.nn.MultilayerPerceptron;
+import org.apache.ignite.ml.nn.architecture.MLPArchitecture;
+import org.apache.ignite.ml.nn.initializers.RandomInitializer;
+import org.apache.ignite.ml.nn.trainers.distributed.MLPGroupUpdateTrainer;
+import org.apache.ignite.ml.nn.updaters.RPropParameterUpdate;
+import org.apache.ignite.ml.structures.LabeledVector;
+import org.apache.ignite.thread.IgniteThread;
+
+/**
+ * <p>
+ * Example of using distributed {@link MultilayerPerceptron}.</p>
+ * <p>
+ * Remote nodes should always be started with the special configuration file that
+ * enables P2P class loading: {@code 'ignite.{sh|bat} examples/config/example-ignite.xml'}.</p>
+ * <p>
+ * Alternatively you can run {@link ExampleNodeStartup} in another JVM, which will start a node
+ * with the {@code examples/config/example-ignite.xml} configuration.</p>
+ */
+public class MLPGroupTrainerExample {
+    /**
+     * Executes example.
+     *
+     * @param args Command line arguments, none required.
+     */
+    public static void main(String[] args) throws InterruptedException {
+        // IMPL NOTE based on MLPGroupTrainerTest#testXOR
+        System.out.println(">>> Distributed  multilayer perceptron example started.");
+
+        // Start ignite grid.
+        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
+            System.out.println(">>> Ignite grid started.");
+
+            // Create IgniteThread; we must work with the distributed cache inside an IgniteThread
+            // because we create an Ignite cache internally.
+            IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
+                MLPGroupTrainerExample.class.getSimpleName(), () -> {
+
+                int samplesCnt = 1000;
+
+                Matrix xorInputs = new DenseLocalOnHeapMatrix(
+                    new double[][] {{0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, {1.0, 1.0}},
+                    StorageConstants.ROW_STORAGE_MODE).transpose();
+
+                Matrix xorOutputs = new DenseLocalOnHeapMatrix(
+                    new double[][] {{0.0}, {1.0}, {1.0}, {0.0}},
+                    StorageConstants.ROW_STORAGE_MODE).transpose();
+
+                MLPArchitecture conf = new MLPArchitecture(2).
+                    withAddedLayer(10, true, Activators.RELU).
+                    withAddedLayer(1, false, Activators.SIGMOID);
+
+                IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = LabeledVectorsCache.createNew(ignite);
+                String cacheName = cache.getName();
+                Random rnd = new Random(12345L);
+
+                try (IgniteDataStreamer<Integer, LabeledVector<Vector, Vector>> streamer =
+                         ignite.dataStreamer(cacheName)) {
+                    streamer.perNodeBufferSize(10000);
+
+                    for (int i = 0; i < samplesCnt; i++) {
+                        int col = Math.abs(rnd.nextInt()) % 4;
+                        streamer.addData(i, new LabeledVector<>(xorInputs.getCol(col), xorOutputs.getCol(col)));
+                    }
+                }
+
+                int totalCnt = 100;
+                int failCnt = 0;
+                MLPGroupUpdateTrainer<RPropParameterUpdate> trainer = MLPGroupUpdateTrainer.getDefault(ignite).
+                    withSyncRate(3).
+                    withTolerance(0.001).
+                    withMaxGlobalSteps(1000);
+
+                for (int i = 0; i < totalCnt; i++) {
+
+                    MLPGroupUpdateTrainerCacheInput trainerInput = new MLPGroupUpdateTrainerCacheInput(conf,
+                        new RandomInitializer(rnd), 6, cache, 4);
+
+                    MultilayerPerceptron mlp = trainer.train(trainerInput);
+
+                    Matrix predict = mlp.apply(xorInputs);
+
+                    System.out.println(">>> Prediction data at step " + i + " of total " + totalCnt + ":");
+
+                    Tracer.showAscii(predict);
+
+                    System.out.println("Difference estimate: " + xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
+
+                    failCnt += closeEnough(xorOutputs.getRow(0), predict.getRow(0)) ? 0 : 1;
+                }
+
+                double failRatio = (double)failCnt / totalCnt;
+
+                System.out.println("\n>>> Fail percentage: " + (failRatio * 100) + "%.");
+
+                System.out.println("\n>>> Distributed  multilayer perceptron example completed.");
+            });
+
+            igniteThread.start();
+
+            igniteThread.join();
+        }
+    }
+
+    /** */
+    private static boolean closeEnough(Vector v1, Vector v2) {
+        return v1.minus(v2).kNorm(2) < 5E-1;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPLocalTrainerExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPLocalTrainerExample.java b/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPLocalTrainerExample.java
new file mode 100644
index 0000000..3f4adc4
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/nn/MLPLocalTrainerExample.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.examples.ml.nn;
+
+import java.util.Random;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.StorageConstants;
+import org.apache.ignite.ml.math.Tracer;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix;
+import org.apache.ignite.ml.nn.Activators;
+import org.apache.ignite.ml.nn.LocalBatchTrainerInput;
+import org.apache.ignite.ml.nn.LossFunctions;
+import org.apache.ignite.ml.nn.MultilayerPerceptron;
+import org.apache.ignite.ml.nn.architecture.MLPArchitecture;
+import org.apache.ignite.ml.nn.initializers.RandomInitializer;
+import org.apache.ignite.ml.nn.trainers.local.MLPLocalBatchTrainer;
+import org.apache.ignite.ml.nn.updaters.RPropUpdateCalculator;
+import org.apache.ignite.ml.util.Utils;
+
+/**
+ * Example of using local {@link MultilayerPerceptron}.
+ */
+public class MLPLocalTrainerExample {
+    /**
+     * Executes example.
+     *
+     * @param args Command line arguments, none required.
+     */
+    public static void main(String[] args) {
+        // IMPL NOTE based on MLPLocalTrainerTest#testXORRProp
+        System.out.println(">>> Local multilayer perceptron example started.");
+
+        Matrix xorInputs = new DenseLocalOnHeapMatrix(new double[][] {{0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, {1.0, 1.0}},
+            StorageConstants.ROW_STORAGE_MODE).transpose();
+
+        System.out.println("\n>>> Input data:");
+
+        Tracer.showAscii(xorInputs);
+
+        Matrix xorOutputs = new DenseLocalOnHeapMatrix(new double[][] {{0.0}, {1.0}, {1.0}, {0.0}},
+            StorageConstants.ROW_STORAGE_MODE).transpose();
+
+        MLPArchitecture conf = new MLPArchitecture(2).
+            withAddedLayer(10, true, Activators.RELU).
+            withAddedLayer(1, false, Activators.SIGMOID);
+
+        SimpleMLPLocalBatchTrainerInput trainerInput = new SimpleMLPLocalBatchTrainerInput(conf,
+            new Random(1234L), xorInputs, xorOutputs, 4);
+
+        System.out.println("\n>>> Perform training.");
+
+        MultilayerPerceptron mlp = new MLPLocalBatchTrainer<>(LossFunctions.MSE,
+            () -> new RPropUpdateCalculator<>(),
+            0.0001,
+            16000).train(trainerInput);
+
+        System.out.println("\n>>> Apply model.");
+
+        Matrix predict = mlp.apply(xorInputs);
+
+        System.out.println("\n>>> Predicted data:");
+
+        Tracer.showAscii(predict);
+
+        System.out.println("\n>>> Reference expected data:");
+
+        Tracer.showAscii(xorOutputs);
+
+        System.out.println("\n>>> Difference estimate: " + xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
+
+        System.out.println("\n>>> Local multilayer perceptron example completed.");
+    }
+
+    /**
+     * Class for local batch training of {@link MultilayerPerceptron}.
+     *
+     * It is constructed from two matrices: one containing inputs of the function to approximate and the other
+     * containing the ground truth values of this function for the corresponding inputs.
+     *
+     * The batch size produced by this input is fixed to a given constant value.
+     */
+    private static class SimpleMLPLocalBatchTrainerInput implements LocalBatchTrainerInput<MultilayerPerceptron> {
+        /**
+         * Multilayer perceptron to be trained.
+         */
+        private final MultilayerPerceptron mlp;
+
+        /**
+         * Inputs stored as columns.
+         */
+        private final Matrix inputs;
+
+        /**
+         * Ground truths stored as columns.
+         */
+        private final Matrix groundTruth;
+
+        /**
+         * Size of batch returned on each step.
+         */
+        private final int batchSize;
+
+        /**
+         * Construct instance of this class.
+         *
+         * @param arch Architecture of multilayer perceptron.
+         * @param rnd Random numbers generator.
+         * @param inputs Inputs stored as columns.
+         * @param groundTruth Ground truth stored as columns.
+         * @param batchSize Size of batch returned on each step.
+         */
+        SimpleMLPLocalBatchTrainerInput(MLPArchitecture arch, Random rnd, Matrix inputs, Matrix groundTruth, int batchSize) {
+            this.mlp = new MultilayerPerceptron(arch, new RandomInitializer(rnd));
+            this.inputs = inputs;
+            this.groundTruth = groundTruth;
+            this.batchSize = batchSize;
+        }
+
+        /** {@inheritDoc} */
+        @Override public IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier() {
+            return () -> {
+                int inputRowSize = inputs.rowSize();
+                int outputRowSize = groundTruth.rowSize();
+
+                Matrix vectors = new DenseLocalOnHeapMatrix(inputRowSize, batchSize);
+                Matrix labels = new DenseLocalOnHeapMatrix(outputRowSize, batchSize);
+
+                int[] samples = Utils.selectKDistinct(inputs.columnSize(), batchSize);
+
+                for (int i = 0; i < batchSize; i++) {
+                    vectors.assignColumn(i, inputs.getCol(samples[i]));
+                    labels.assignColumn(i, groundTruth.getCol(samples[i]));
+                }
+
+                return new IgniteBiTuple<>(vectors, labels);
+            };
+        }
+
+        /** {@inheritDoc} */
+        @Override public MultilayerPerceptron mdl() {
+            return mlp;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/examples/src/main/ml/org/apache/ignite/examples/ml/nn/package-info.java
----------------------------------------------------------------------
diff --git a/examples/src/main/ml/org/apache/ignite/examples/ml/nn/package-info.java b/examples/src/main/ml/org/apache/ignite/examples/ml/nn/package-info.java
new file mode 100644
index 0000000..3a0ed29
--- /dev/null
+++ b/examples/src/main/ml/org/apache/ignite/examples/ml/nn/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <!-- Package description. -->
+ * ML neural network examples.
+ */
+package org.apache.ignite.examples.ml.nn;

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/math/Isomorphism.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/Isomorphism.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/Isomorphism.java
new file mode 100644
index 0000000..6f17e3a
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/Isomorphism.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.math;
+
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+
+/**
+ * Function from {@code K} to {@code V} with a defined inverse.
+ *
+ * @param <K> Type of domain.
+ * @param <V> Type of codomain.
+ */
+public class Isomorphism<K, V> {
+    /** */
+    private IgniteFunction<K, V> forward;
+    /** */
+    private IgniteFunction<V, K> back;
+
+    /**
+     * Identity isomorphism.
+     */
+    public static <K> Isomorphism<K, K> id() {
+        return new Isomorphism<>(a -> a, a -> a);
+    }
+
+    /**
+     * Build isomorphism with forward and backward functions.
+     *
+     * @param forward Forward.
+     * @param back Back.
+     */
+    public Isomorphism(IgniteFunction<K, V> forward, IgniteFunction<V, K> back) {
+        this.forward = forward;
+        this.back = back;
+    }
+
+    /**
+     * Forward function.
+     *
+     * @param k K.
+     */
+    public V forward(K k) {
+        return forward.apply(k);
+    }
+
+    /**
+     * Backward function.
+     *
+     * @param v V.
+     */
+    public K back(V v) {
+        return back.apply(v);
+    }
+}
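For illustration, a minimal usage sketch of the new Isomorphism class; this snippet is not part of the commit (the class name IsomorphismSketch is invented for the example) and relies only on the API shown in the hunk above:

    import org.apache.ignite.ml.math.Isomorphism;

    public class IsomorphismSketch {
        public static void main(String[] args) {
            // Editorial sketch (not in the commit): an isomorphism between
            // String and its char[] representation.
            Isomorphism<String, char[]> iso = new Isomorphism<>(String::toCharArray, String::new);

            char[] chars = iso.forward("ignite"); // ['i', 'g', 'n', 'i', 't', 'e']
            String back = iso.back(chars);        // "ignite", i.e. back(forward(k)) == k

            // Identity isomorphism: forward and back are both a -> a.
            Isomorphism<String, String> id = Isomorphism.id();

            System.out.println(back.equals("ignite") && "x".equals(id.forward("x")));
        }
    }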

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/math/VectorUtils.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/VectorUtils.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/VectorUtils.java
index 7268365..2f51245 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/VectorUtils.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/VectorUtils.java
@@ -135,4 +135,24 @@ public class VectorUtils {
 
         return res;
     }
+
+    /**
+     * Get copy of part of given length of given vector starting from given offset.
+     *
+     * @param v Vector to copy part from.
+     * @param off Offset.
+     * @param len Length.
+     * @return Copy of part of given length of given vector starting from given offset.
+     */
+    public static Vector copyPart(Vector v, int off, int len) {
+        assert off >= 0;
+        assert off + len <= v.size();
+
+        Vector res = v.like(len);
+
+        for (int i = 0; i < len; i++)
+            res.setX(i, v.getX(off + i));
+
+        return res;
+    }
 }
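For illustration, a short usage sketch of the new copyPart helper; this snippet is not part of the commit (the class name CopyPartSketch is invented), and it assumes the existing DenseLocalOnHeapVector implementation from this module:

    import org.apache.ignite.ml.math.Tracer;
    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.VectorUtils;
    import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;

    public class CopyPartSketch {
        public static void main(String[] args) {
            Vector v = new DenseLocalOnHeapVector(new double[] {1, 2, 3, 4, 5});

            // Editorial sketch (not in the commit): copy the 3-element slice starting at offset 1.
            Vector part = VectorUtils.copyPart(v, 1, 3);

            Tracer.showAscii(part); // [2.0, 3.0, 4.0]
        }
    }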

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
index fa7ee76..f723166 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/Functions.java
@@ -100,7 +100,7 @@ public final class Functions {
      * @return Minimum between {@code a} and {@code b} in terms of comparator {@code f}.
      */
     public static <T> T MIN_GENERIC(T a, T b, Comparator<T> f) {
-        return f.compare(a, b) > 0 ? a : b;
+        return f.compare(a, b) < 0 ? a : b;
     }
 
     /** Function that returns {@code min(abs(a), abs(b))}. */
@@ -215,15 +215,73 @@ public final class Functions {
     }
 
     /**
-     * Curry bifunction.
+     * Curry bi-function.
      *
-     * @param f Bifunction to curry.
+     * @param f Bi-function to curry.
      * @param <A> Type of first argument of {@code f}.
      * @param <B> Type of second argument of {@code f}.
      * @param <C> Return type of {@code f}.
-     * @return Curried bifunction.
+     * @return Curried bi-function.
      */
     public static <A, B, C> IgniteCurriedBiFunction<A, B, C> curry(BiFunction<A, B, C> f) {
         return a -> b -> f.apply(a, b);
     }
+
+    /**
+     * Transform bi-function of the form (a, b) -> c into a function of form a -> (b -> c).
+     *
+     * @param f Function to be curried.
+     * @param <A> Type of first argument of function to be transformed.
+     * @param <B> Type of second argument of function to be transformed.
+     * @param <C> Return type of function to be transformed.
+     * @return Curried bi-function.
+     */
+    public static <A, B, C> IgniteCurriedBiFunction<A, B, C> curry(IgniteBiFunction<A, B, C> f) {
+        return a -> b -> f.apply(a, b);
+    }
+
+    /**
+     * Transform tri-function of the form (a, b, c) -> d into a function of form a -> (b -> (c -> d)).
+     *
+     * @param f Function to be curried.
+     * @param <A> Type of first argument of function to be transformed.
+     * @param <B> Type of second argument of function to be transformed.
+     * @param <C> Type of third argument of function to be transformed.
+     * @param <D> Type of output of function to be transformed.
+     * @return Curried tri-function.
+     */
+    public static <A, B, C, D> IgniteCurriedTriFunction<A, B, C, D> curry(IgniteTriFunction<A, B, C, D> f) {
+        return a -> b -> c -> f.apply(a, b, c);
+    }
+
+    /**
+     * Transform function of form a -> b into a -> (() -> b).
+     *
+     * @param f Function to be transformed.
+     * @param <A> Type of input of function to be transformed.
+     * @param <B> Type of output of function to be transformed.
+     * @return Transformed function.
+     */
+    public static <A, B> IgniteFunction<A, IgniteSupplier<B>> outputSupplier(IgniteFunction<A, B> f) {
+        return a -> {
+            B res = f.apply(a);
+            return () -> res;
+        };
+    }
+
+    /**
+     * Transform function of form (a, b) -> c into (a, b) -> (() -> c).
+     *
+     * @param f Function to be transformed.
+     * @param <A> Type of first argument of function to be transformed.
+     * @param <B> Type of second argument of function to be transformed.
+     * @param <C> Type of output of function to be transformed.
+     * @return Transformed function.
+     */
+    public static <A, B, C> IgniteBiFunction<A, B, IgniteSupplier<C>> outputSupplier(IgniteBiFunction<A, B, C> f) {
+        return (a, b) -> {
+            C res = f.apply(a, b);
+            return () -> res;
+        };
+    }
 }
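For illustration, a hedged sketch of the new currying and supplier helpers; this snippet is not part of the commit (the class name CurrySketch is invented) and uses only the functions added in the hunk above:

    import org.apache.ignite.ml.math.functions.Functions;
    import org.apache.ignite.ml.math.functions.IgniteBiFunction;
    import org.apache.ignite.ml.math.functions.IgniteCurriedTriFunction;
    import org.apache.ignite.ml.math.functions.IgniteSupplier;
    import org.apache.ignite.ml.math.functions.IgniteTriFunction;

    public class CurrySketch {
        public static void main(String[] args) {
            // Editorial sketch (not in the commit): curry a tri-function,
            // then apply its arguments one at a time.
            IgniteTriFunction<Integer, Integer, Integer, Integer> sum3 = (a, b, c) -> a + b + c;
            IgniteCurriedTriFunction<Integer, Integer, Integer, Integer> curried = Functions.curry(sum3);

            System.out.println(curried.apply(1).apply(2).apply(3)); // 6

            // Wrap a bi-function so its (eagerly computed) result is returned via a supplier.
            IgniteBiFunction<Integer, Integer, IgniteSupplier<Integer>> lazySum =
                Functions.outputSupplier((IgniteBiFunction<Integer, Integer, Integer>)(a, b) -> a + b);

            System.out.println(lazySum.apply(2, 3).get()); // 5
        }
    }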

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedTriFunction.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedTriFunction.java b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedTriFunction.java
new file mode 100644
index 0000000..cddffcd
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/math/functions/IgniteCurriedTriFunction.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.math.functions;
+
+import java.io.Serializable;
+
+/**
+ * Serializable curried tri-function.
+ *
+ * @see IgniteCurriedBiFunction
+ */
+public interface IgniteCurriedTriFunction<A, B, C, D> extends IgniteFunction<A, IgniteCurriedBiFunction<B, C, D>>, Serializable {
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/LabeledVectorsCache.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/LabeledVectorsCache.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/LabeledVectorsCache.java
new file mode 100644
index 0000000..07a6e2a
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/LabeledVectorsCache.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn;
+
+import java.util.UUID;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.structures.LabeledVector;
+
+/**
+ * Class for working with labeled vectors cache.
+ */
+public class LabeledVectorsCache {
+    /**
+     * Create new labeled vectors cache.
+     *
+     * @param ignite Ignite instance.
+     * @return New labeled vectors cache.
+     */
+    public static IgniteCache<Integer, LabeledVector<Vector, Vector>> createNew(Ignite ignite) {
+        CacheConfiguration<Integer, LabeledVector<Vector, Vector>> cfg = new CacheConfiguration<>();
+
+        // Write to primary.
+        cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+        // Atomic transactions only.
+        cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+        // No copying of values.
+        cfg.setCopyOnRead(false);
+
+        // Cache is partitioned.
+        cfg.setCacheMode(CacheMode.PARTITIONED);
+
+        cfg.setBackups(0);
+
+        cfg.setOnheapCacheEnabled(true);
+
+        cfg.setName("LBLD_VECS_" + UUID.randomUUID().toString());
+
+        return ignite.getOrCreateCache(cfg);
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/LocalBatchTrainerInput.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/LocalBatchTrainerInput.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/LocalBatchTrainerInput.java
index 4574841..3a87d02 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/nn/LocalBatchTrainerInput.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/LocalBatchTrainerInput.java
@@ -20,17 +20,18 @@ package org.apache.ignite.ml.nn;
 import org.apache.ignite.lang.IgniteBiTuple;
 import org.apache.ignite.ml.Model;
 import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
 
 /**
  * Interface for classes containing input parameters for LocalBatchTrainer.
  */
 public interface LocalBatchTrainerInput<M extends Model<Matrix, Matrix>> {
     /**
-     * Get next batch in form of matrix of inputs and matrix of outputs.
+     * Get supplier of next batch in form of matrix of inputs and matrix of outputs.
      *
-     * @return Next batch.
+     * @return Supplier of next batch.
      */
-    IgniteBiTuple<Matrix, Matrix> getBatch();
+    IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier();
 
     /**
      * Model to train.

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/LossFunctions.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/LossFunctions.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/LossFunctions.java
index 652072c..dff239c 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/nn/LossFunctions.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/LossFunctions.java
@@ -30,18 +30,18 @@ public class LossFunctions {
      */
     public static IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> MSE = groundTruth ->
         new IgniteDifferentiableVectorToDoubleFunction() {
-        /** {@inheritDoc} */
-        @Override public Vector differential(Vector pnt) {
-            double multiplier = 2.0 / pnt.size();
-            return pnt.minus(groundTruth).times(multiplier);
-        }
+            /** {@inheritDoc} */
+            @Override public Vector differential(Vector pnt) {
+                double multiplier = 2.0 / pnt.size();
+                return pnt.minus(groundTruth).times(multiplier);
+            }
 
-        /** {@inheritDoc} */
-        @Override public Double apply(Vector vector) {
-            return groundTruth.copy().map(vector, (a, b) -> {
-                double diff = a - b;
-                return diff * diff;
-            }).sum() / (vector.size());
-        }
-    };
-}
\ No newline at end of file
+            /** {@inheritDoc} */
+            @Override public Double apply(Vector vector) {
+                return groundTruth.copy().map(vector, (a, b) -> {
+                    double diff = a - b;
+                    return diff * diff;
+                }).sum() / (vector.size());
+            }
+        };
+}
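For illustration, a small numeric check of the MSE loss defined above; this snippet is not part of the commit (the class name MseSketch is invented) and assumes the existing DenseLocalOnHeapVector implementation from this module:

    import org.apache.ignite.ml.math.Vector;
    import org.apache.ignite.ml.math.functions.IgniteDifferentiableVectorToDoubleFunction;
    import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
    import org.apache.ignite.ml.nn.LossFunctions;

    public class MseSketch {
        public static void main(String[] args) {
            // Editorial sketch (not in the commit): MSE at ground truth [0, 1],
            // evaluated at the point [0.5, 0.5].
            Vector truth = new DenseLocalOnHeapVector(new double[] {0.0, 1.0});
            Vector pnt = new DenseLocalOnHeapVector(new double[] {0.5, 0.5});

            IgniteDifferentiableVectorToDoubleFunction loss = LossFunctions.MSE.apply(truth);

            double val = loss.apply(pnt);         // ((0 - 0.5)^2 + (1 - 0.5)^2) / 2 = 0.25
            Vector grad = loss.differential(pnt); // 2/2 * (pnt - truth) = [0.5, -0.5]

            System.out.println(val + " " + grad.getX(0) + " " + grad.getX(1));
        }
    }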

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPGroupUpdateTrainerCacheInput.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPGroupUpdateTrainerCacheInput.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPGroupUpdateTrainerCacheInput.java
new file mode 100644
index 0000000..14db261
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPGroupUpdateTrainerCacheInput.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix;
+import org.apache.ignite.ml.nn.architecture.MLPArchitecture;
+import org.apache.ignite.ml.nn.initializers.MLPInitializer;
+import org.apache.ignite.ml.nn.trainers.distributed.AbstractMLPGroupUpdateTrainerInput;
+import org.apache.ignite.ml.nn.trainers.distributed.MLPGroupUpdateTrainer;
+import org.apache.ignite.ml.structures.LabeledVector;
+import org.apache.ignite.ml.util.Utils;
+
+/**
+ * Input for {@link MLPGroupUpdateTrainer} where batches are taken from cache of labeled vectors.
+ */
+public class MLPGroupUpdateTrainerCacheInput extends AbstractMLPGroupUpdateTrainerInput {
+    /**
+     * Cache of labeled vectors.
+     */
+    private final IgniteCache<Integer, LabeledVector<Vector, Vector>> cache;
+
+    /**
+     * Size of batch to return on each training iteration.
+     */
+    private final int batchSize;
+
+    /**
+     * Multilayer perceptron.
+     */
+    private final MultilayerPerceptron mlp;
+
+    /**
+     * Construct instance of this class with given parameters.
+     *
+     * @param arch Architecture of multilayer perceptron.
+     * @param init Initializer of multilayer perceptron.
+     * @param networksCnt Count of networks to be trained in parallel by {@link MLPGroupUpdateTrainer}.
+     * @param cache Cache with labeled vectors.
+     * @param batchSize Size of batch to return on each training iteration.
+     */
+    public MLPGroupUpdateTrainerCacheInput(MLPArchitecture arch, MLPInitializer init,
+        int networksCnt, IgniteCache<Integer, LabeledVector<Vector, Vector>> cache,
+        int batchSize) {
+        super(networksCnt);
+
+        this.batchSize = batchSize;
+        this.cache = cache;
+        this.mlp = new MultilayerPerceptron(arch, init);
+    }
+
+    /**
+     * Construct instance of this class with given parameters and default initializer.
+     *
+     * @param arch Architecture of multilayer perceptron.
+     * @param networksCnt Count of networks to be trained in parallel by {@link MLPGroupUpdateTrainer}.
+     * @param cache Cache with labeled vectors.
+     * @param batchSize Size of batch to return on each training iteration.
+     */
+    public MLPGroupUpdateTrainerCacheInput(MLPArchitecture arch, int networksCnt,
+        IgniteCache<Integer, LabeledVector<Vector, Vector>> cache,
+        int batchSize) {
+        this(arch, null, networksCnt, cache, batchSize);
+    }
+
+    /** {@inheritDoc} */
+    @Override public IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier() {
+        String cName = cache.getName();
+        int bs = batchSize;
+
+        return () -> {
+            Ignite ignite = Ignition.localIgnite();
+            IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = ignite.getOrCreateCache(cName);
+            int total = cache.size();
+            Affinity<Integer> affinity = ignite.affinity(cName);
+
+            List<Integer> allKeys = IntStream.range(0, total).boxed().collect(Collectors.toList());
+            List<Integer> keys = new ArrayList<>(affinity.mapKeysToNodes(allKeys).get(ignite.cluster().localNode()));
+
+            int locKeysCnt = keys.size();
+
+            int[] selected = Utils.selectKDistinct(locKeysCnt, Math.min(bs, locKeysCnt));
+
+            // Get dimensions of vectors in cache. We suppose that every feature vector has
+            // the same dimension d1 and every label has the same dimension d2.
+            LabeledVector<Vector, Vector> dimEntry = cache.get(keys.get(selected[0]));
+
+            Matrix inputs = new DenseLocalOnHeapMatrix(dimEntry.features().size(), bs);
+            Matrix groundTruth = new DenseLocalOnHeapMatrix(dimEntry.label().size(), bs);
+
+            for (int i = 0; i < selected.length; i++) {
+                LabeledVector<Vector, Vector> labeled = cache.get(keys.get(selected[i]));
+
+                inputs.assignColumn(i, labeled.features());
+                groundTruth.assignColumn(i, labeled.label());
+            }
+
+            return new IgniteBiTuple<>(inputs, groundTruth);
+        };
+    }
+
+    /** {@inheritDoc} */
+    @Override public MultilayerPerceptron mdl() {
+        return mlp;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPLayer.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPLayer.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPLayer.java
index 621dc9f..b5120c6 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPLayer.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/MLPLayer.java
@@ -17,13 +17,14 @@
 
 package org.apache.ignite.ml.nn;
 
+import java.io.Serializable;
 import org.apache.ignite.ml.math.Matrix;
 import org.apache.ignite.ml.math.Vector;
 
 /**
  * Class containing information about layer.
  */
-public class MLPLayer {
+public class MLPLayer implements Serializable {
     /**
      * Weights matrix.
      */

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/MultilayerPerceptron.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/MultilayerPerceptron.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/MultilayerPerceptron.java
index e372d96..d55e0e9 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/nn/MultilayerPerceptron.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/MultilayerPerceptron.java
@@ -17,6 +17,7 @@
 
 package org.apache.ignite.ml.nn;
 
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -41,7 +42,7 @@ import static org.apache.ignite.ml.math.util.MatrixUtil.elementWiseTimes;
 /**
  * Class encapsulating logic of multilayer perceptron.
  */
-public class MultilayerPerceptron implements Model<Matrix, Matrix>, SmoothParametrized<MultilayerPerceptron> {
+public class MultilayerPerceptron implements Model<Matrix, Matrix>, SmoothParametrized<MultilayerPerceptron>, Serializable {
     /**
      * This MLP architecture.
      */
@@ -68,7 +69,7 @@ public class MultilayerPerceptron implements Model<Matrix, Matrix>, SmoothParame
         architecture = arch;
         below = null;
 
-        initLayers(initializer);
+        initLayers(initializer != null ? initializer : new RandomInitializer(new Random()));
     }
 
     /**
@@ -77,11 +78,7 @@ public class MultilayerPerceptron implements Model<Matrix, Matrix>, SmoothParame
      * @param arch Architecture.
      */
     public MultilayerPerceptron(MLPArchitecture arch) {
-        layers = new ArrayList<>(arch.layersCount() + 1);
-        architecture = arch;
-        below = null;
-
-        initLayers(new RandomInitializer(new Random()));
+        this(arch, null);
     }
 
     /**
@@ -389,7 +386,7 @@ public class MultilayerPerceptron implements Model<Matrix, Matrix>, SmoothParame
             if (hasBiases(layer))
                 db = dz.foldRows(Vector::sum).times(invBatchSize);
 
-            // Because we go from last layer, add each layer to the begining.
+            // Because we go from last layer, add each layer to the beginning.
             layersParameters.add(0, new MLPLayer(dw, db));
         }
 
@@ -555,7 +552,8 @@ public class MultilayerPerceptron implements Model<Matrix, Matrix>, SmoothParame
      * @param nonlinearity Nonlinearity of current layer.
      * @return Gradients matrix.
      */
-    private Matrix differentiateNonlinearity(Matrix linearOut, IgniteDifferentiableDoubleToDoubleFunction nonlinearity) {
+    private Matrix differentiateNonlinearity(Matrix linearOut,
+        IgniteDifferentiableDoubleToDoubleFunction nonlinearity) {
         Matrix diff = linearOut.copy();
 
         diff.map(nonlinearity::differential);

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/LayerArchitecture.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/LayerArchitecture.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/LayerArchitecture.java
index 31a3e9a..4ede888 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/LayerArchitecture.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/LayerArchitecture.java
@@ -17,10 +17,12 @@
 
 package org.apache.ignite.ml.nn.architecture;
 
+import java.io.Serializable;
+
 /**
  * Layer architecture.
  */
-public class LayerArchitecture {
+public class LayerArchitecture implements Serializable {
     /**
      * Count of neurons on layer.
      */
@@ -37,6 +39,7 @@ public class LayerArchitecture {
 
     /**
      * Get count of neurons in layer.
+     *
      * @return Count of neurons in layer.
      */
     public int neuronsCount() {

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/MLPArchitecture.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/MLPArchitecture.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/MLPArchitecture.java
index 3ef7b61..4018328 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/MLPArchitecture.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/architecture/MLPArchitecture.java
@@ -17,6 +17,7 @@
 
 package org.apache.ignite.ml.nn.architecture;
 
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.ignite.ml.math.functions.IgniteDifferentiableDoubleToDoubleFunction;
@@ -24,7 +25,7 @@ import org.apache.ignite.ml.math.functions.IgniteDifferentiableDoubleToDoubleFun
 /**
  * Class containing information about architecture of MLP.
  */
-public class MLPArchitecture {
+public class MLPArchitecture implements Serializable {
     /**
      * List of layers architectures.
      */
@@ -51,6 +52,7 @@ public class MLPArchitecture {
 
     /**
      * Count of layers in MLP.
+     *
      * @return Layers count.
      */
     public int layersCount() {
@@ -59,6 +61,7 @@ public class MLPArchitecture {
 
     /**
      * Size of input of MLP.
+     *
      * @return Size of input.
      */
     public int inputSize() {
@@ -67,6 +70,7 @@ public class MLPArchitecture {
 
     /**
      * Size of output of MLP.
+     *
      * @return Size of output.
      */
     public int outputSize() {
@@ -81,7 +85,8 @@ public class MLPArchitecture {
      * @param f Activation function of a new layer.
      * @return New MLP architecture with new layer added on top of all this architecture layers.
      */
-    public MLPArchitecture withAddedLayer(int neuronsCnt, boolean hasBias, IgniteDifferentiableDoubleToDoubleFunction f) {
+    public MLPArchitecture withAddedLayer(int neuronsCnt, boolean hasBias,
+        IgniteDifferentiableDoubleToDoubleFunction f) {
         ArrayList<LayerArchitecture> newLayers = new ArrayList<>(layers);
 
         newLayers.add(new TransformationLayerArchitecture(neuronsCnt, hasBias, f));

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/initializers/RandomInitializer.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/initializers/RandomInitializer.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/initializers/RandomInitializer.java
index 18cb8a6..25c27cd 100644
--- a/modules/ml/src/main/java/org/apache/ignite/ml/nn/initializers/RandomInitializer.java
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/initializers/RandomInitializer.java
@@ -28,7 +28,7 @@ public class RandomInitializer implements MLPInitializer {
     /**
      * RNG.
      */
-    Random rnd;
+    private final Random rnd;
 
     /**
      * Construct RandomInitializer from given RNG.

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/AbstractMLPGroupUpdateTrainerInput.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/AbstractMLPGroupUpdateTrainerInput.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/AbstractMLPGroupUpdateTrainerInput.java
new file mode 100644
index 0000000..ed65af7
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/AbstractMLPGroupUpdateTrainerInput.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+import java.util.UUID;
+import java.util.stream.Stream;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+import org.apache.ignite.ml.nn.LocalBatchTrainerInput;
+import org.apache.ignite.ml.nn.MultilayerPerceptron;
+import org.apache.ignite.ml.trainers.group.GroupTrainerCacheKey;
+import org.apache.ignite.ml.trainers.group.GroupTrainerInput;
+
+/**
+ * Abstract class for {@link MLPGroupUpdateTrainer} inputs.
+ */
+public abstract class AbstractMLPGroupUpdateTrainerInput implements GroupTrainerInput<Void>, LocalBatchTrainerInput<MultilayerPerceptron> {
+    /**
+     * Count of networks to be trained in parallel.
+     */
+    private final int networksCnt;
+
+    /**
+     * Construct instance of this class with given parameters.
+     *
+     * @param networksCnt Count of networks to be trained in parallel.
+     */
+    public AbstractMLPGroupUpdateTrainerInput(int networksCnt) {
+        this.networksCnt = networksCnt;
+    }
+
+    /** {@inheritDoc} */
+    @Override public IgniteSupplier<Stream<GroupTrainerCacheKey<Void>>> initialKeys(UUID trainingUUID) {
+        final int nt = networksCnt; // IMPL NOTE the intermediate variable keeps the capturing lambda small.
+        return () -> MLPCache.allKeys(nt, trainingUUID);
+    }
+
+    /**
+     * Get count of networks to be trained in parallel.
+     *
+     * @return Count of networks.
+     */
+    public int trainingsCount() {
+        return networksCnt;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPCache.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPCache.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPCache.java
new file mode 100644
index 0000000..0fa2f29
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPCache.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+import java.util.UUID;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.ml.trainers.group.GroupTrainerCacheKey;
+
+/**
+ * Cache for distributed MLP.
+ */
+public class MLPCache {
+    /**
+     * Cache name.
+     */
+    public static final String CACHE_NAME = "MLP_CACHE";
+
+    /**
+     * Affinity service for the MLP cache.
+     *
+     * @return Affinity service for the MLP cache.
+     */
+    public static Affinity<GroupTrainerCacheKey<Void>> affinity() {
+        return Ignition.localIgnite().affinity(CACHE_NAME);
+    }
+
+    /**
+     * Get or create the MLP cache.
+     *
+     * @param ignite Ignite instance.
+     * @return MLP cache.
+     */
+    public static IgniteCache<GroupTrainerCacheKey<Void>, MLPGroupTrainingCacheValue> getOrCreate(Ignite ignite) {
+        CacheConfiguration<GroupTrainerCacheKey<Void>, MLPGroupTrainingCacheValue> cfg = new CacheConfiguration<>();
+
+        // Write to primary.
+        cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+        // Atomic operations only (no transactions).
+        cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+        // No copying of values.
+        cfg.setCopyOnRead(false);
+
+        // Cache is partitioned.
+        cfg.setCacheMode(CacheMode.PARTITIONED);
+
+        cfg.setBackups(0);
+
+        cfg.setOnheapCacheEnabled(true);
+
+        cfg.setName(CACHE_NAME);
+
+        return ignite.getOrCreateCache(cfg);
+    }
+
+    /**
+     * Get all keys of this cache for given parameters.
+     *
+     * @param trainingsCnt Parallel trainings count.
+     * @param uuid Training UUID.
+     * @return All keys of this cache for given parameters.
+     */
+    public static Stream<GroupTrainerCacheKey<Void>> allKeys(int trainingsCnt, UUID uuid) {
+        return IntStream.range(0, trainingsCnt).mapToObj(i -> new GroupTrainerCacheKey<Void>(i, null, uuid));
+    }
+}
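
A usage sketch (not part of this commit) combining the two static helpers above: enumerate the per-network keys of a fresh training session and seed each with an initial perceptron. The wrapper class and the seed routine are hypothetical; building the MultilayerPerceptron itself is out of scope here.

    import java.util.UUID;
    import org.apache.ignite.Ignite;
    import org.apache.ignite.IgniteCache;
    import org.apache.ignite.ml.nn.MultilayerPerceptron;
    import org.apache.ignite.ml.nn.trainers.distributed.MLPCache;
    import org.apache.ignite.ml.nn.trainers.distributed.MLPGroupTrainingCacheValue;
    import org.apache.ignite.ml.trainers.group.GroupTrainerCacheKey;

    public class MLPCacheSeedExample {
        /** Put the same initial network under every key of a new training session. */
        public static UUID seed(Ignite ignite, MultilayerPerceptron mlp, int trainingsCnt) {
            IgniteCache<GroupTrainerCacheKey<Void>, MLPGroupTrainingCacheValue> cache =
                MLPCache.getOrCreate(ignite);

            UUID trainingUUID = UUID.randomUUID();

            // allKeys produces one key per network trained in parallel.
            MLPCache.allKeys(trainingsCnt, trainingUUID)
                .forEach(key -> cache.put(key, new MLPGroupTrainingCacheValue(mlp)));

            return trainingUUID;
        }
    }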

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupTrainingCacheValue.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupTrainingCacheValue.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupTrainingCacheValue.java
new file mode 100644
index 0000000..f8e75f6
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupTrainingCacheValue.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+import org.apache.ignite.ml.nn.MultilayerPerceptron;
+
+/**
+ * Value of cache used for group training by {@link MLPGroupUpdateTrainer}.
+ */
+public class MLPGroupTrainingCacheValue {
+    /**
+     * Multilayer perceptron.
+     */
+    private MultilayerPerceptron mlp;
+
+    /**
+     * Construct instance of this class with given parameters.
+     *
+     * @param mlp Multilayer perceptron.
+     */
+    public MLPGroupTrainingCacheValue(MultilayerPerceptron mlp) {
+        this.mlp = mlp;
+    }
+
+    /**
+     * Get multilayer perceptron.
+     *
+     * @return Multilayer perceptron.
+     */
+    public MultilayerPerceptron perceptron() {
+        return mlp;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainer.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainer.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainer.java
new file mode 100644
index 0000000..1f98b53
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainer.java
@@ -0,0 +1,367 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.UUID;
+import java.util.stream.Stream;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.functions.IgniteDifferentiableVectorToDoubleFunction;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+import org.apache.ignite.ml.math.util.MatrixUtil;
+import org.apache.ignite.ml.nn.LossFunctions;
+import org.apache.ignite.ml.nn.MultilayerPerceptron;
+import org.apache.ignite.ml.nn.updaters.ParameterUpdateCalculator;
+import org.apache.ignite.ml.nn.updaters.RPropParameterUpdate;
+import org.apache.ignite.ml.nn.updaters.RPropUpdateCalculator;
+import org.apache.ignite.ml.trainers.group.GroupTrainerCacheKey;
+import org.apache.ignite.ml.trainers.group.MetaoptimizerGroupTrainer;
+import org.apache.ignite.ml.trainers.group.ResultAndUpdates;
+import org.apache.ignite.ml.trainers.group.chain.EntryAndContext;
+import org.apache.ignite.ml.util.Utils;
+
+/**
+ * Update-based distributed training of MLP.
+ *
+ * @param <U> Type of update.
+ */
+public class MLPGroupUpdateTrainer<U extends Serializable> extends
+    MetaoptimizerGroupTrainer<MLPGroupUpdateTrainerLocalContext,
+        Void,
+        MLPGroupTrainingCacheValue,
+        U,
+        MultilayerPerceptron,
+        U,
+        MultilayerPerceptron,
+        AbstractMLPGroupUpdateTrainerInput,
+        MLPGroupUpdateTrainingContext<U>,
+        ArrayList<U>,
+        MLPGroupUpdateTrainingLoopData<U>,
+        U> {
+    /**
+     * Loss function.
+     */
+    private final IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> loss;
+
+    /**
+     * Error tolerance.
+     */
+    private final double tolerance;
+
+    /**
+     * Maximal count of global steps.
+     */
+    private final int maxGlobalSteps;
+
+    /**
+     * Synchronize updates between networks every syncRate steps.
+     */
+    private final int syncRate;
+
+    /**
+     * Function used to reduce updates from different networks (for example, averaging the gradients of all networks).
+     */
+    private final IgniteFunction<List<U>, U> allUpdatesReducer;
+
+    /**
+     * Function used to reduce updates within one training (for example, summing all sequential gradient updates
+     * into one gradient update).
+     */
+    private final IgniteFunction<List<U>, U> locStepUpdatesReducer;
+
+    /**
+     * Updates calculator.
+     */
+    private final ParameterUpdateCalculator<MultilayerPerceptron, U> updateCalculator;
+
+    /**
+     * Default maximal count of global steps.
+     */
+    private static final int DEFAULT_MAX_GLOBAL_STEPS = 30;
+
+    /**
+     * Default sync rate.
+     */
+    private static final int DEFAULT_SYNC_RATE = 5;
+
+    /**
+     * Default all updates reducer.
+     */
+    private static final IgniteFunction<List<RPropParameterUpdate>, RPropParameterUpdate>
+        DEFAULT_ALL_UPDATES_REDUCER = RPropParameterUpdate::avg;
+
+    /**
+     * Default local steps updates reducer.
+     */
+    private static final IgniteFunction<List<RPropParameterUpdate>, RPropParameterUpdate>
+        DEFAULT_LOCAL_STEP_UPDATES_REDUCER = RPropParameterUpdate::sumLocal;
+
+    /**
+     * Default update calculator.
+     */
+    private static final ParameterUpdateCalculator<MultilayerPerceptron, RPropParameterUpdate>
+        DEFAULT_UPDATE_CALCULATOR = new RPropUpdateCalculator<>();
+
+    /**
+     * Default loss function.
+     */
+    private static final IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> DEFAULT_LOSS
+        = LossFunctions.MSE;
+
+    /**
+     * Construct instance of this class with given parameters.
+     *
+     * @param maxGlobalSteps Maximal count of global steps.
+     * @param syncRate Count of local steps after which updates are synchronized between networks.
+     * @param allUpdatesReducer Function used to reduce updates from different networks.
+     * @param locStepUpdatesReducer Function used to reduce updates within one training.
+     * @param updateCalculator Updates calculator.
+     * @param loss Loss function.
+     * @param ignite Ignite instance.
+     * @param tolerance Error tolerance.
+     */
+    public MLPGroupUpdateTrainer(int maxGlobalSteps,
+        int syncRate,
+        IgniteFunction<List<U>, U> allUpdatesReducer,
+        IgniteFunction<List<U>, U> locStepUpdatesReducer,
+        ParameterUpdateCalculator<MultilayerPerceptron, U> updateCalculator,
+        IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> loss,
+        Ignite ignite, double tolerance) {
+        super(new MLPMetaoptimizer<>(allUpdatesReducer), MLPCache.getOrCreate(ignite), ignite);
+
+        this.maxGlobalSteps = maxGlobalSteps;
+        this.syncRate = syncRate;
+        this.allUpdatesReducer = allUpdatesReducer;
+        this.locStepUpdatesReducer = locStepUpdatesReducer;
+        this.updateCalculator = updateCalculator;
+        this.loss = loss;
+        this.tolerance = tolerance;
+    }
+
+    /**
+     * Get default {@link MLPGroupUpdateTrainer}.
+     *
+     * @param ignite Ignite instance.
+     * @return Default {@link MLPGroupUpdateTrainer}.
+     */
+    public static MLPGroupUpdateTrainer<RPropParameterUpdate> getDefault(Ignite ignite) {
+        return new MLPGroupUpdateTrainer<>(DEFAULT_MAX_GLOBAL_STEPS, DEFAULT_SYNC_RATE, DEFAULT_ALL_UPDATES_REDUCER,
+            DEFAULT_LOCAL_STEP_UPDATES_REDUCER, DEFAULT_UPDATE_CALCULATOR, DEFAULT_LOSS, ignite, 0.01);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void init(AbstractMLPGroupUpdateTrainerInput data, UUID trainingUUID) {
+        super.init(data, trainingUUID);
+
+        MLPGroupUpdateTrainerDataCache.getOrCreate(ignite).put(trainingUUID, new MLPGroupUpdateTrainingData<>(
+            updateCalculator,
+            syncRate,
+            locStepUpdatesReducer,
+            data.batchSupplier(),
+            loss,
+            tolerance
+        ));
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteFunction<GroupTrainerCacheKey<Void>, ResultAndUpdates<U>> distributedInitializer(
+        AbstractMLPGroupUpdateTrainerInput data) {
+        MultilayerPerceptron initPerceptron = data.mdl();
+
+        // For each key put initial network into the cache.
+        return key -> {
+            Ignite ignite = Ignition.localIgnite();
+
+            U initUpdate = updateCalculator.init(initPerceptron, loss);
+
+            return ResultAndUpdates.of(initUpdate).updateCache(MLPCache.getOrCreate(ignite), key,
+                new MLPGroupTrainingCacheValue(initPerceptron));
+        };
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteFunction<EntryAndContext<Void, MLPGroupTrainingCacheValue,
+        MLPGroupUpdateTrainingContext<U>>, MLPGroupUpdateTrainingLoopData<U>> trainingLoopStepDataExtractor() {
+        return entryAndContext -> {
+            MLPGroupUpdateTrainingContext<U> ctx = entryAndContext.context();
+            Map.Entry<GroupTrainerCacheKey<Void>, MLPGroupTrainingCacheValue> entry = entryAndContext.entry();
+            MLPGroupUpdateTrainingData<U> data = ctx.data();
+
+            return new MLPGroupUpdateTrainingLoopData<>(entry.getValue().perceptron(),
+                data.updateCalculator(), data.stepsCnt(), data.updateReducer(), ctx.previousUpdate(), entry.getKey(),
+                data.batchSupplier(), data.loss(), data.tolerance());
+        };
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteSupplier<Stream<GroupTrainerCacheKey<Void>>> keysToProcessInTrainingLoop(
+        MLPGroupUpdateTrainerLocalContext locCtx) {
+        int trainingsCnt = locCtx.parallelTrainingsCnt();
+        UUID uuid = locCtx.trainingUUID();
+
+        return () -> MLPCache.allKeys(trainingsCnt, uuid);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteSupplier<MLPGroupUpdateTrainingContext<U>> remoteContextExtractor(U prevUpdate,
+        MLPGroupUpdateTrainerLocalContext ctx) {
+        UUID uuid = ctx.trainingUUID();
+
+        return () -> {
+            MLPGroupUpdateTrainingData<U> data = MLPGroupUpdateTrainerDataCache
+                .getOrCreate(Ignition.localIgnite()).get(uuid);
+            return new MLPGroupUpdateTrainingContext<>(data, prevUpdate);
+        };
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteFunction<MLPGroupUpdateTrainingLoopData<U>, ResultAndUpdates<U>> dataProcessor() {
+        return data -> {
+            MultilayerPerceptron mlp = data.mlp();
+
+            MultilayerPerceptron mlpCp = Utils.copy(mlp);
+            ParameterUpdateCalculator<MultilayerPerceptron, U> updateCalculator = data.updateCalculator();
+            IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> loss = data.loss();
+
+            // Initialize the calculator on the copy; the ParameterUpdateCalculator API should provide a proper way of setting the loss.
+            updateCalculator.init(mlpCp, loss);
+
+            U curUpdate = data.previousUpdate();
+
+            int steps = data.stepsCnt();
+            List<U> updates = new ArrayList<>(steps);
+
+            IgniteBiTuple<Matrix, Matrix> batch = data.batchSupplier().get();
+
+            for (int i = 0; i < steps; i++) {
+                Matrix input = batch.get1();
+                Matrix truth = batch.get2();
+
+                int batchSize = truth.columnSize();
+
+                Matrix predicted = mlpCp.apply(input);
+
+                double err = MatrixUtil.zipFoldByColumns(predicted, truth, (predCol, truthCol) ->
+                    loss.apply(truthCol).apply(predCol)).sum() / batchSize;
+
+                if (err < data.tolerance())
+                    break;
+
+                mlpCp = updateCalculator.update(mlpCp, curUpdate);
+                updates.add(curUpdate);
+
+                curUpdate = updateCalculator.calculateNewUpdate(mlpCp, curUpdate, i, input, truth);
+            }
+
+            U update = data.getUpdateReducer().apply(updates);
+
+            MultilayerPerceptron newMlp = updateCalculator.update(mlp, data.previousUpdate());
+
+            return new ResultAndUpdates<>(update).
+                updateCache(MLPCache.getOrCreate(Ignition.localIgnite()), data.key(),
+                    new MLPGroupTrainingCacheValue(newMlp));
+        };
+    }
+
+    /** {@inheritDoc} */
+    @Override protected MLPGroupUpdateTrainerLocalContext<U> initialLocalContext(
+        AbstractMLPGroupUpdateTrainerInput data, UUID trainingUUID) {
+        return new MLPGroupUpdateTrainerLocalContext<>(trainingUUID, maxGlobalSteps, allUpdatesReducer,
+            data.trainingsCount());
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteSupplier<Stream<GroupTrainerCacheKey<Void>>> finalResultKeys(U data,
+        MLPGroupUpdateTrainerLocalContext locCtx) {
+        UUID uuid = locCtx.trainingUUID();
+        int trainingsCnt = locCtx.parallelTrainingsCnt();
+
+        return () -> MLPCache.allKeys(trainingsCnt, uuid);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteSupplier<MLPGroupUpdateTrainingContext<U>> extractContextForFinalResultCreation(U data,
+        MLPGroupUpdateTrainerLocalContext locCtx) {
+        return () -> null;
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteFunction<EntryAndContext<Void, MLPGroupTrainingCacheValue,
+        MLPGroupUpdateTrainingContext<U>>, ResultAndUpdates<MultilayerPerceptron>> finalResultsExtractor() {
+        return context -> ResultAndUpdates.of(context.entry().getValue().perceptron());
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteFunction<List<MultilayerPerceptron>, MultilayerPerceptron> finalResultsReducer() {
+        // Just take any of the MLPs since they will all be in the same state.
+        return mlps -> mlps.stream().filter(Objects::nonNull).findFirst().orElse(null);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected MultilayerPerceptron mapFinalResult(MultilayerPerceptron res,
+        MLPGroupUpdateTrainerLocalContext locCtx) {
+        return res;
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void cleanup(MLPGroupUpdateTrainerLocalContext locCtx) {
+        // No-op.
+    }
+
+    /**
+     * Create new {@link MLPGroupUpdateTrainer} with new maxGlobalSteps value.
+     *
+     * @param maxGlobalSteps New maxGlobalSteps value.
+     * @return New {@link MLPGroupUpdateTrainer} with new maxGlobalSteps value.
+     */
+    public MLPGroupUpdateTrainer<U> withMaxGlobalSteps(int maxGlobalSteps) {
+        return new MLPGroupUpdateTrainer<>(maxGlobalSteps, syncRate, allUpdatesReducer, locStepUpdatesReducer,
+            updateCalculator, loss, ignite, tolerance);
+    }
+
+    /**
+     * Create new {@link MLPGroupUpdateTrainer} with new syncRate value.
+     *
+     * @param syncRate New syncRate value.
+     * @return New {@link MLPGroupUpdateTrainer} with new syncRate value.
+     */
+    public MLPGroupUpdateTrainer<U> withSyncRate(int syncRate) {
+        return new MLPGroupUpdateTrainer<>(maxGlobalSteps, syncRate, allUpdatesReducer, locStepUpdatesReducer,
+            updateCalculator, loss, ignite, tolerance);
+    }
+
+    /**
+     * Create new {@link MLPGroupUpdateTrainer} with new tolerance.
+     *
+     * @param tolerance New tolerance value.
+     * @return New {@link MLPGroupUpdateTrainer} with new tolerance value.
+     */
+    public MLPGroupUpdateTrainer<U> withTolerance(double tolerance) {
+        return new MLPGroupUpdateTrainer<>(maxGlobalSteps, syncRate, allUpdatesReducer, locStepUpdatesReducer,
+            updateCalculator, loss, ignite, tolerance);
+    }
+}
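
A configuration sketch (not part of this commit): the RProp-based default from getDefault(Ignite) refined with the fluent with* methods defined above. The concrete values are illustrative only, and the call that actually starts training (inherited from the group trainer hierarchy) is left out since its signature is not shown in this commit.

    import org.apache.ignite.Ignite;
    import org.apache.ignite.ml.nn.trainers.distributed.MLPGroupUpdateTrainer;
    import org.apache.ignite.ml.nn.updaters.RPropParameterUpdate;

    public class TrainerConfigExample {
        /** Start from the defaults (RProp updates, MSE loss) and override selected knobs. */
        public static MLPGroupUpdateTrainer<RPropParameterUpdate> configure(Ignite ignite) {
            return MLPGroupUpdateTrainer.getDefault(ignite)
                .withMaxGlobalSteps(100) // more global steps than the default of 30
                .withSyncRate(10)        // synchronize updates every 10 local steps
                .withTolerance(1e-3);    // stop a local loop once batch error drops below this
        }
    }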

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerDataCache.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerDataCache.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerDataCache.java
new file mode 100644
index 0000000..c237f86
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerDataCache.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+import java.util.UUID;
+import org.apache.ignite.Ignite;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.Ignition;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.cache.affinity.Affinity;
+import org.apache.ignite.configuration.CacheConfiguration;
+
+/**
+ * Cache used for storing data for {@link MLPGroupUpdateTrainer}.
+ */
+public class MLPGroupUpdateTrainerDataCache {
+    /**
+     * Cache name.
+     */
+    public static final String CACHE_NAME = "MLP_GRP_TRN_DATA_CACHE";
+
+    /**
+     * Affinity service for this cache.
+     *
+     * @return Affinity service for this cache.
+     */
+    public static Affinity<UUID> affinity() {
+        return Ignition.localIgnite().affinity(CACHE_NAME);
+    }
+
+    /**
+     * Get or create cache storing training data for {@link MLPGroupUpdateTrainer}.
+     *
+     * @param ignite Ignite instance.
+     * @return Cache storing training data for {@link MLPGroupUpdateTrainer}.
+     */
+    public static IgniteCache<UUID, MLPGroupUpdateTrainingData> getOrCreate(Ignite ignite) {
+        CacheConfiguration<UUID, MLPGroupUpdateTrainingData> cfg = new CacheConfiguration<>();
+
+        // Write to primary.
+        cfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.PRIMARY_SYNC);
+
+        // Atomic operations only (no transactions).
+        cfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
+
+        // No copying of values.
+        cfg.setCopyOnRead(false);
+
+        // Cache is replicated.
+        cfg.setCacheMode(CacheMode.REPLICATED);
+
+        cfg.setBackups(0);
+
+        cfg.setOnheapCacheEnabled(true);
+
+        cfg.setName(CACHE_NAME);
+
+        return ignite.getOrCreateCache(cfg);
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerLocalContext.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerLocalContext.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerLocalContext.java
new file mode 100644
index 0000000..ecb141d
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainerLocalContext.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+import java.util.List;
+import java.util.UUID;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.trainers.group.chain.HasTrainingUUID;
+
+/**
+ * Local context for {@link MLPGroupUpdateTrainer}.
+ *
+ * @param <U> Type of updates on which training is done.
+ */
+public class MLPGroupUpdateTrainerLocalContext<U> implements HasTrainingUUID {
+    /**
+     * UUID of training.
+     */
+    private final UUID trainingUUID;
+
+    /**
+     * Maximal number of global steps.
+     */
+    private final int globalStepsMaxCnt;
+
+    /**
+     * Reducer used to reduce updates resulting from each parallel training.
+     */
+    private final IgniteFunction<List<U>, U> allUpdatesReducer;
+
+    /**
+     * Count of networks to be trained in parallel.
+     */
+    private final int parallelTrainingsCnt;
+
+    /**
+     * Current global step of {@link MLPGroupUpdateTrainer}.
+     */
+    private int curStep;
+
+    /**
+     * Construct an instance of this class with given parameters.
+     *
+     * @param trainingUUID UUID of training.
+     * @param globalStepsMaxCnt Maximal number of global steps.
+     * @param allUpdatesReducer Reducer used to reduce updates resulting from each parallel training.
+     * @param parallelTrainingsCnt Count of networks to be trained in parallel.
+     */
+    public MLPGroupUpdateTrainerLocalContext(UUID trainingUUID, int globalStepsMaxCnt,
+        IgniteFunction<List<U>, U> allUpdatesReducer, int parallelTrainingsCnt) {
+        this.trainingUUID = trainingUUID;
+        this.globalStepsMaxCnt = globalStepsMaxCnt;
+        this.allUpdatesReducer = allUpdatesReducer;
+        this.parallelTrainingsCnt = parallelTrainingsCnt;
+        curStep = 0;
+    }
+
+    /** {@inheritDoc} */
+    @Override public UUID trainingUUID() {
+        return trainingUUID;
+    }
+
+    /**
+     * Get global steps max count.
+     *
+     * @return Global steps max count.
+     */
+    public int globalStepsMaxCount() {
+        return globalStepsMaxCnt;
+    }
+
+    /**
+     * Get reducer used to reduce updates resulting from each parallel training.
+     *
+     * @return Reducer used to reduce updates resulting from each parallel training.
+     */
+    public IgniteFunction<List<U>, U> allUpdatesReducer() {
+        return allUpdatesReducer;
+    }
+
+    /**
+     * Get count of networks to be trained in parallel.
+     *
+     * @return Count of networks to be trained in parallel.
+     */
+    public int parallelTrainingsCnt() {
+        return parallelTrainingsCnt;
+    }
+
+    /**
+     * Get current global step.
+     *
+     * @return Current global step.
+     */
+    public int currentStep() {
+        return curStep;
+    }
+
+    /**
+     * Increment current global step.
+     *
+     * @return This object.
+     */
+    public MLPGroupUpdateTrainerLocalContext<U> incrementCurrentStep() {
+        curStep++;
+
+        return this;
+    }
+}
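
A sketch of the driver loop this context is built for (hypothetical; the actual loop lives in the group trainer hierarchy and is not shown in this commit): the settings are bound once at construction, and the single mutable counter advances via incrementCurrentStep().

    import java.util.List;
    import java.util.UUID;
    import org.apache.ignite.ml.math.functions.IgniteFunction;
    import org.apache.ignite.ml.nn.trainers.distributed.MLPGroupUpdateTrainerLocalContext;
    import org.apache.ignite.ml.nn.updaters.RPropParameterUpdate;

    public class LocalContextExample {
        public static void main(String[] args) {
            IgniteFunction<List<RPropParameterUpdate>, RPropParameterUpdate> reducer =
                RPropParameterUpdate::avg;

            MLPGroupUpdateTrainerLocalContext<RPropParameterUpdate> ctx =
                new MLPGroupUpdateTrainerLocalContext<>(UUID.randomUUID(), 30, reducer, 4);

            // The global loop checks the counter against the configured maximum
            // and bumps it once per completed global step.
            while (ctx.currentStep() < ctx.globalStepsMaxCount())
                ctx.incrementCurrentStep();
        }
    }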

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingContext.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingContext.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingContext.java
new file mode 100644
index 0000000..f4ccd98
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingContext.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+/**
+ * Context extracted during the distributed phase of a training loop step in {@link MLPGroupUpdateTrainer}.
+ *
+ * @param <U> Type of update.
+ */
+public class MLPGroupUpdateTrainingContext<U> {
+    /**
+     * Group training data.
+     */
+    private final MLPGroupUpdateTrainingData<U> data;
+
+    /**
+     * Update produced by previous training loop step.
+     */
+    private final U previousUpdate;
+
+    /**
+     * Construct an instance of this class.
+     *
+     * @param data Group training data.
+     * @param previousUpdate Update produced by previous training loop step.
+     */
+    public MLPGroupUpdateTrainingContext(MLPGroupUpdateTrainingData<U> data, U previousUpdate) {
+        this.data = data;
+        this.previousUpdate = previousUpdate;
+    }
+
+    /**
+     * Get group training data.
+     *
+     * @return Group training data.
+     */
+    public MLPGroupUpdateTrainingData<U> data() {
+        return data;
+    }
+
+    /**
+     * Get update produced by previous training loop step.
+     *
+     * @return Update produced by previous training loop step.
+     */
+    public U previousUpdate() {
+        return previousUpdate;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/4e24927e/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingData.java
----------------------------------------------------------------------
diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingData.java b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingData.java
new file mode 100644
index 0000000..86074dd
--- /dev/null
+++ b/modules/ml/src/main/java/org/apache/ignite/ml/nn/trainers/distributed/MLPGroupUpdateTrainingData.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.ml.nn.trainers.distributed;
+
+import java.util.List;
+import org.apache.ignite.lang.IgniteBiTuple;
+import org.apache.ignite.ml.math.Matrix;
+import org.apache.ignite.ml.math.Vector;
+import org.apache.ignite.ml.math.functions.IgniteDifferentiableVectorToDoubleFunction;
+import org.apache.ignite.ml.math.functions.IgniteFunction;
+import org.apache.ignite.ml.math.functions.IgniteSupplier;
+import org.apache.ignite.ml.nn.MultilayerPerceptron;
+import org.apache.ignite.ml.nn.updaters.ParameterUpdateCalculator;
+
+/** Multilayer perceptron group update training data. */
+public class MLPGroupUpdateTrainingData<U> {
+    /** */
+    private final ParameterUpdateCalculator<MultilayerPerceptron, U> updateCalculator;
+    /** */
+    private final int stepsCnt;
+    /** */
+    private final IgniteFunction<List<U>, U> updateReducer;
+    /** */
+    private final IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier;
+    /** */
+    private final IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> loss;
+    /** */
+    private final double tolerance;
+
+    /** Construct multilayer perceptron group update training data with all parameters provided. */
+    public MLPGroupUpdateTrainingData(
+        ParameterUpdateCalculator<MultilayerPerceptron, U> updateCalculator, int stepsCnt,
+        IgniteFunction<List<U>, U> updateReducer,
+        IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier,
+        IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> loss, double tolerance) {
+        this.updateCalculator = updateCalculator;
+        this.stepsCnt = stepsCnt;
+        this.updateReducer = updateReducer;
+        this.batchSupplier = batchSupplier;
+        this.loss = loss;
+        this.tolerance = tolerance;
+    }
+
+    /** Get update calculator. */
+    public ParameterUpdateCalculator<MultilayerPerceptron, U> updateCalculator() {
+        return updateCalculator;
+    }
+
+    /** Get count of steps. */
+    public int stepsCnt() {
+        return stepsCnt;
+    }
+
+    /** Get update reducer. */
+    public IgniteFunction<List<U>, U> updateReducer() {
+        return updateReducer;
+    }
+
+    /** Get batch supplier. */
+    public IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier() {
+        return batchSupplier;
+    }
+
+    /** Get loss function. */
+    public IgniteFunction<Vector, IgniteDifferentiableVectorToDoubleFunction> loss() {
+        return loss;
+    }
+
+    /** Get tolerance. */
+    public double tolerance() {
+        return tolerance;
+    }
+}
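
A construction sketch (not part of this commit) wiring the holder above with the same components MLPGroupUpdateTrainer uses by default: an RProp update calculator, the sumLocal reducer for sequential local updates, and MSE loss. The wrapper class, the step count, and the tolerance value are illustrative assumptions.

    import org.apache.ignite.lang.IgniteBiTuple;
    import org.apache.ignite.ml.math.Matrix;
    import org.apache.ignite.ml.math.functions.IgniteSupplier;
    import org.apache.ignite.ml.nn.LossFunctions;
    import org.apache.ignite.ml.nn.MultilayerPerceptron;
    import org.apache.ignite.ml.nn.trainers.distributed.MLPGroupUpdateTrainingData;
    import org.apache.ignite.ml.nn.updaters.ParameterUpdateCalculator;
    import org.apache.ignite.ml.nn.updaters.RPropParameterUpdate;
    import org.apache.ignite.ml.nn.updaters.RPropUpdateCalculator;

    public class TrainingDataExample {
        /** Bundle a batch supplier with RProp-based defaults. */
        public static MLPGroupUpdateTrainingData<RPropParameterUpdate> of(
            IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier) {
            ParameterUpdateCalculator<MultilayerPerceptron, RPropParameterUpdate> calculator =
                new RPropUpdateCalculator<>();

            return new MLPGroupUpdateTrainingData<>(
                calculator,                     // update calculator
                5,                              // local steps between synchronizations
                RPropParameterUpdate::sumLocal, // reducer for sequential local updates
                batchSupplier,                  // (inputs, ground truth) batches
                LossFunctions.MSE,              // loss function
                0.01                            // error tolerance
            );
        }
    }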

