From: sboikov@apache.org
To: commits@ignite.apache.org
Reply-To: dev@ignite.apache.org
Date: Mon, 19 Mar 2018 06:26:50 -0000
Message-Id: <255047dd394a40c1be51cdb528ca9963@git.apache.org>
Subject: [13/50] [abbrv] ignite git commit: IGNITE-7876: Adopt SVM Linear Binary Classification Model and Trainer to the new Partitioned Dataset

IGNITE-7876: Adopt SVM Linear Binary Classification Model and Trainer to the new Partitioned Dataset

This closes #3607

Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/318ffe50
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/318ffe50
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/318ffe50

Branch: refs/heads/ignite-zk
Commit: 318ffe50fc0edb1d31f05044be8367bd578b6a88
Parents: 29d56ae
Author: zaleslaw
Authored: Mon Mar 12 15:28:11 2018 +0300
Committer: Yury Babak
Committed: Mon Mar 12 15:28:11 2018 +0300

----------------------------------------------------------------------
 .../ml/svm/SVMBinaryClassificationExample.java  |  131 --
 .../ml/svm/SVMMultiClassificationExample.java   |  130 --
 .../src/main/resources/datasets/titanic.txt     | 1309 ------------------
 .../apache/ignite/ml/structures/Dataset.java    |   10 +
 .../ignite/ml/structures/LabeledDataset.java    |   16 +-
 .../svm/SVMLinearBinaryClassificationModel.java |    4 +-
 .../SVMLinearBinaryClassificationTrainer.java   |   87 +-
 .../SVMLinearMultiClassClassificationModel.java |   89 --
 ...VMLinearMultiClassClassificationTrainer.java |  160 ---
 .../ignite/ml/svm/SVMPartitionContext.java      |   28 +
 .../ml/svm/SVMPartitionDataBuilderOnHeap.java   |   86 ++
 .../org/apache/ignite/ml/LocalModelsTest.java   |   28 -
 .../org/apache/ignite/ml/svm/BaseSVMTest.java   |   58 -
 .../ignite/ml/svm/SVMBinaryTrainerTest.java     |   74 +
 .../org/apache/ignite/ml/svm/SVMModelTest.java  |   15 -
 .../org/apache/ignite/ml/svm/SVMTestSuite.java  |   13 +-
 ...inearSVMBinaryClassificationTrainerTest.java |   35 -
 ...inearSVMBinaryClassificationTrainerTest.java |  141 --
 ...inearSVMBinaryClassificationTrainerTest.java |   38 -
 ...rSVMMultiClassClassificationTrainerTest.java |   35 -
 ...rSVMMultiClassClassificationTrainerTest.java |   76 -
 ...rSVMMultiClassClassificationTrainerTest.java |   38 -
 22 files changed, 272 insertions(+), 2329 deletions(-)
----------------------------------------------------------------------
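For reference, the core API change in this commit replaces the old local-only Trainer.train(LabeledDataset) entry point with the partitioned-dataset entry point DatasetTrainer.fit(datasetBuilder, featureExtractor, lbExtractor, cols), as shown in the trainer diff further below. The snippet that follows is a minimal usage sketch of that new entry point, not code from this commit: the LocalDatasetBuilder helper, the <Integer, double[]> type parameters on the trainer and the toy data are assumptions made for illustration and do not appear in this diff.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder;
    import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
    import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel;
    import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationTrainer;

    public class SvmFitSketch {
        public static void main(String[] args) {
            // Toy upstream data: label in column 0 (-1/+1), three features after it.
            Map<Integer, double[]> data = new HashMap<>();
            data.put(0, new double[] {-1.0, 1.0, 2.0, 29.0});
            data.put(1, new double[] {1.0, 1.0, 1.0, 48.0});
            data.put(2, new double[] {-1.0, 3.0, 1.0, 21.0});
            data.put(3, new double[] {1.0, 2.0, 2.0, 36.0});

            SVMLinearBinaryClassificationTrainer<Integer, double[]> trainer =
                new SVMLinearBinaryClassificationTrainer<>();

            SVMLinearBinaryClassificationModel mdl = trainer.fit(
                new LocalDatasetBuilder<>(data, 2),              // upstream data split into 2 partitions
                (k, v) -> Arrays.copyOfRange(v, 1, v.length),    // feature extractor: columns 1..n
                (k, v) -> v[0],                                  // label extractor: column 0
                3                                                // number of feature columns
            );

            double prediction = mdl.apply(new DenseLocalOnHeapVector(new double[] {1.0, 2.0, 30.0}));
            System.out.println(">>> Predicted class: " + prediction);
        }
    }

Note also that the trainer defaults change in this commit (lambda 0.2 -> 0.4, outer SDCA iterations 20 -> 200, local iterations 50 -> 100), so results will differ from the removed examples unless tuned through the withLambda(), withAmountOfIterations() and withAmountOfLocIterations() setters visible in the trainer sources below.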
http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java ---------------------------------------------------------------------- diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java deleted file mode 100644 index e256276..0000000 --- a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMBinaryClassificationExample.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.examples.ml.svm; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; -import org.apache.ignite.Ignite; -import org.apache.ignite.Ignition; -import org.apache.ignite.examples.ExampleNodeStartup; -import org.apache.ignite.internal.util.IgniteUtils; -import org.apache.ignite.ml.Trainer; -import org.apache.ignite.ml.structures.LabeledDataset; -import org.apache.ignite.ml.structures.LabeledDatasetTestTrainPair; -import org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader; -import org.apache.ignite.ml.structures.preprocessing.LabellingMachine; -import org.apache.ignite.ml.structures.preprocessing.Normalizer; -import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel; -import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationTrainer; -import org.apache.ignite.thread.IgniteThread; - -/** - *

- * Example of using {@link org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel} with Titanic dataset.
- *
- * Note that in this example we cannot guarantee order in which nodes return results of intermediate
- * computations and therefore algorithm can return different results.
- *
- * Remote nodes should always be started with special configuration file which
- * enables P2P class loading: {@code 'ignite.{sh|bat} examples/config/example-ignite.xml'}.
- *
- * Alternatively you can run {@link ExampleNodeStartup} in another JVM which will start node
- * with {@code examples/config/example-ignite.xml} configuration.
- */ -public class SVMBinaryClassificationExample { - /** Separator. */ - private static final String SEPARATOR = ","; - - /** Path to the Iris dataset. */ - private static final String TITANIC_DATASET = "examples/src/main/resources/datasets/titanic.txt"; - - /** - * Executes example. - * - * @param args Command line arguments, none required. - */ - public static void main(String[] args) throws InterruptedException { - System.out.println(">>> SVM Binary classification example started."); - // Start ignite grid. - try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) { - System.out.println(">>> Ignite grid started."); - - IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(), - SVMBinaryClassificationExample.class.getSimpleName(), () -> { - - try { - // Prepare path to read - File file = IgniteUtils.resolveIgnitePath(TITANIC_DATASET); - if (file == null) - throw new RuntimeException("Can't find file: " + TITANIC_DATASET); - - Path path = file.toPath(); - - // Read dataset from file - LabeledDataset dataset = LabeledDatasetLoader.loadFromTxtFile(path, SEPARATOR, true, false); - - // Normalize dataset - Normalizer.normalizeWithMiniMax(dataset); - - // Random splitting of the given data as 70% train and 30% test datasets - LabeledDatasetTestTrainPair split = new LabeledDatasetTestTrainPair(dataset, 0.3); - - System.out.println("\n>>> Amount of observations in train dataset " + split.train().rowSize()); - System.out.println("\n>>> Amount of observations in test dataset " + split.test().rowSize()); - - LabeledDataset test = split.test(); - LabeledDataset train = split.train(); - - System.out.println("\n>>> Create new linear binary SVM trainer object."); - Trainer trainer = new SVMLinearBinaryClassificationTrainer(); - - System.out.println("\n>>> Perform the training to get the model."); - SVMLinearBinaryClassificationModel mdl = trainer.train(train); - - System.out.println("\n>>> SVM classification model: " + mdl); - - // Clone labels - final double[] labels = test.labels(); - - // Save predicted classes to test dataset - LabellingMachine.assignLabels(test, mdl); - - // Calculate amount of errors on test dataset - int amountOfErrors = 0; - for (int i = 0; i < test.rowSize(); i++) { - if (test.label(i) != labels[i]) - amountOfErrors++; - } - - System.out.println("\n>>> Absolute amount of errors " + amountOfErrors); - System.out.println("\n>>> Prediction percentage " + (1 - amountOfErrors / (double) test.rowSize())); - - } catch (IOException e) { - e.printStackTrace(); - System.out.println("\n>>> Unexpected exception, check resources: " + e); - } finally { - System.out.println("\n>>> SVM binary classification example completed."); - } - - }); - - igniteThread.start(); - igniteThread.join(); - } - } -} http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java ---------------------------------------------------------------------- diff --git a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java b/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java deleted file mode 100644 index 7aee5d2..0000000 --- a/examples/src/main/java/org/apache/ignite/examples/ml/svm/SVMMultiClassificationExample.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.examples.ml.svm; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; -import org.apache.ignite.Ignite; -import org.apache.ignite.Ignition; -import org.apache.ignite.examples.ExampleNodeStartup; -import org.apache.ignite.internal.util.IgniteUtils; -import org.apache.ignite.ml.Trainer; -import org.apache.ignite.ml.structures.LabeledDataset; -import org.apache.ignite.ml.structures.LabeledDatasetTestTrainPair; -import org.apache.ignite.ml.structures.preprocessing.LabeledDatasetLoader; -import org.apache.ignite.ml.structures.preprocessing.LabellingMachine; -import org.apache.ignite.ml.structures.preprocessing.Normalizer; -import org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationModel; -import org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationTrainer; -import org.apache.ignite.thread.IgniteThread; - -/** - *

- * Example of using {@link org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationModel} with Iris dataset.
- *
- * Note that in this example we cannot guarantee order in which nodes return results of intermediate
- * computations and therefore algorithm can return different results.
- *
- * Remote nodes should always be started with special configuration file which
- * enables P2P class loading: {@code 'ignite.{sh|bat} examples/config/example-ignite.xml'}.
- *
- * Alternatively you can run {@link ExampleNodeStartup} in another JVM which will start node
- * with {@code examples/config/example-ignite.xml} configuration.
- */ -public class SVMMultiClassificationExample { - /** Separator. */ - private static final String SEPARATOR = "\t"; - - /** Path to the Iris dataset. */ - private static final String IRIS_DATASET = "examples/src/main/resources/datasets/iris.txt"; - - /** - * Executes example. - * - * @param args Command line arguments, none required. - */ - public static void main(String[] args) throws InterruptedException { - System.out.println(">>> SVM Multi-Class classification example started."); - // Start ignite grid. - try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) { - System.out.println(">>> Ignite grid started."); - - IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(), - SVMMultiClassificationExample.class.getSimpleName(), () -> { - - try { - // Prepare path to read - File file = IgniteUtils.resolveIgnitePath(IRIS_DATASET); - if (file == null) - throw new RuntimeException("Can't find file: " + IRIS_DATASET); - - Path path = file.toPath(); - - // Read dataset from file - LabeledDataset dataset = LabeledDatasetLoader.loadFromTxtFile(path, SEPARATOR, true, false); - - // Normalize dataset - Normalizer.normalizeWithMiniMax(dataset); - - // Random splitting of the given data as 70% train and 30% test datasets - LabeledDatasetTestTrainPair split = new LabeledDatasetTestTrainPair(dataset, 0.3); - - System.out.println("\n>>> Amount of observations in train dataset " + split.train().rowSize()); - System.out.println("\n>>> Amount of observations in test dataset " + split.test().rowSize()); - - LabeledDataset test = split.test(); - LabeledDataset train = split.train(); - - System.out.println("\n>>> Create new linear multi-class SVM trainer object."); - Trainer trainer = new SVMLinearMultiClassClassificationTrainer(); - - System.out.println("\n>>> Perform the training to get the model."); - SVMLinearMultiClassClassificationModel mdl = trainer.train(train); - - System.out.println("\n>>> SVM classification model: " + mdl); - - // Clone labels - final double[] labels = test.labels(); - - // Save predicted classes to test dataset - LabellingMachine.assignLabels(test, mdl); - - // Calculate amount of errors on test dataset - int amountOfErrors = 0; - for (int i = 0; i < test.rowSize(); i++) { - if (test.label(i) != labels[i]) - amountOfErrors++; - } - - System.out.println("\n>>> Absolute amount of errors " + amountOfErrors); - System.out.println("\n>>> Prediction percentage " + (1 - amountOfErrors / (double) test.rowSize())); - - } catch (IOException e) { - e.printStackTrace(); - System.out.println("\n>>> Unexpected exception, check resources: " + e); - } finally { - System.out.println("\n>>> SVM Multi-Class classification example completed."); - } - }); - - igniteThread.start(); - igniteThread.join(); - } - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/examples/src/main/resources/datasets/titanic.txt ---------------------------------------------------------------------- diff --git a/examples/src/main/resources/datasets/titanic.txt b/examples/src/main/resources/datasets/titanic.txt deleted file mode 100644 index fa2dc70..0000000 --- a/examples/src/main/resources/datasets/titanic.txt +++ /dev/null @@ -1,1309 +0,0 @@ -1,1,2,29,0,0,211.3375 -1,1,1,0.9167,1,2,151.55 --1,1,2,2,1,2,151.55 --1,1,1,30,1,2,151.55 --1,1,2,25,1,2,151.55 -1,1,1,48,0,0,26.55 -1,1,2,63,1,0,77.9583 --1,1,1,39,0,0,0 -1,1,2,53,2,0,51.4792 --1,1,1,71,0,0,49.5042 --1,1,1,47,1,0,227.525 -1,1,2,18,1,0,227.525 -1,1,2,24,0,0,69.3 
-1,1,2,26,0,0,78.85 -1,1,1,80,0,0,30 --1,1,1,,0,0,25.925 --1,1,1,24,0,1,247.5208 -1,1,2,50,0,1,247.5208 -1,1,2,32,0,0,76.2917 --1,1,1,36,0,0,75.2417 -1,1,1,37,1,1,52.5542 -1,1,2,47,1,1,52.5542 -1,1,1,26,0,0,30 -1,1,2,42,0,0,227.525 -1,1,2,29,0,0,221.7792 --1,1,1,25,0,0,26 -1,1,1,25,1,0,91.0792 -1,1,2,19,1,0,91.0792 -1,1,2,35,0,0,135.6333 -1,1,1,28,0,0,26.55 --1,1,1,45,0,0,35.5 -1,1,1,40,0,0,31 -1,1,2,30,0,0,164.8667 -1,1,2,58,0,0,26.55 --1,1,1,42,0,0,26.55 -1,1,2,45,0,0,262.375 -1,1,2,22,0,1,55 -1,1,1,,0,0,26.55 --1,1,1,41,0,0,30.5 --1,1,1,48,0,0,50.4958 --1,1,1,,0,0,39.6 -1,1,2,44,0,0,27.7208 -1,1,2,59,2,0,51.4792 -1,1,2,60,0,0,76.2917 -1,1,2,41,0,0,134.5 --1,1,1,45,0,0,26.55 --1,1,1,,0,0,31 -1,1,1,42,0,0,26.2875 -1,1,2,53,0,0,27.4458 -1,1,1,36,0,1,512.3292 -1,1,2,58,0,1,512.3292 --1,1,1,33,0,0,5 --1,1,1,28,0,0,47.1 --1,1,1,17,0,0,47.1 -1,1,1,11,1,2,120 -1,1,2,14,1,2,120 -1,1,1,36,1,2,120 -1,1,2,36,1,2,120 --1,1,1,49,0,0,26 -1,1,2,,0,0,27.7208 --1,1,1,36,1,0,78.85 -1,1,2,76,1,0,78.85 --1,1,1,46,1,0,61.175 -1,1,2,47,1,0,61.175 -1,1,1,27,1,0,53.1 -1,1,2,33,1,0,53.1 -1,1,2,36,0,0,262.375 -1,1,2,30,0,0,86.5 -1,1,1,45,0,0,29.7 -1,1,2,,0,1,55 --1,1,1,,0,0,0 --1,1,1,27,1,0,136.7792 -1,1,2,26,1,0,136.7792 -1,1,2,22,0,0,151.55 --1,1,1,,0,0,52 --1,1,1,47,0,0,25.5875 -1,1,2,39,1,1,83.1583 --1,1,1,37,1,1,83.1583 -1,1,2,64,0,2,83.1583 -1,1,2,55,2,0,25.7 --1,1,1,,0,0,26.55 --1,1,1,70,1,1,71 -1,1,2,36,0,2,71 -1,1,2,64,1,1,26.55 --1,1,1,39,1,0,71.2833 -1,1,2,38,1,0,71.2833 -1,1,1,51,0,0,26.55 -1,1,1,27,0,0,30.5 -1,1,2,33,0,0,151.55 --1,1,1,31,1,0,52 -1,1,2,27,1,2,52 -1,1,1,31,1,0,57 -1,1,2,17,1,0,57 -1,1,1,53,1,1,81.8583 -1,1,1,4,0,2,81.8583 -1,1,2,54,1,1,81.8583 --1,1,1,50,1,0,106.425 -1,1,2,27,1,1,247.5208 -1,1,2,48,1,0,106.425 -1,1,2,48,1,0,39.6 -1,1,1,49,1,0,56.9292 --1,1,1,39,0,0,29.7 -1,1,2,23,0,1,83.1583 -1,1,2,38,0,0,227.525 -1,1,2,54,1,0,78.2667 --1,1,2,36,0,0,31.6792 --1,1,1,,0,0,221.7792 -1,1,2,,0,0,31.6833 -1,1,2,,0,0,110.8833 -1,1,1,36,0,0,26.3875 --1,1,1,30,0,0,27.75 -1,1,2,24,3,2,263 -1,1,2,28,3,2,263 -1,1,2,23,3,2,263 --1,1,1,19,3,2,263 --1,1,1,64,1,4,263 -1,1,2,60,1,4,263 -1,1,2,30,0,0,56.9292 --1,1,1,,0,0,26.55 -1,1,1,50,2,0,133.65 -1,1,1,43,1,0,27.7208 -1,1,2,,1,0,133.65 -1,1,2,22,0,2,49.5 -1,1,1,60,1,1,79.2 -1,1,2,48,1,1,79.2 --1,1,1,,0,0,0 --1,1,1,37,1,0,53.1 -1,1,2,35,1,0,53.1 --1,1,1,47,0,0,38.5 -1,1,2,35,0,0,211.5 -1,1,2,22,0,1,59.4 -1,1,2,45,0,1,59.4 --1,1,1,24,0,0,79.2 -1,1,1,49,1,0,89.1042 -1,1,2,,1,0,89.1042 --1,1,1,71,0,0,34.6542 -1,1,1,53,0,0,28.5 -1,1,2,19,0,0,30 --1,1,1,38,0,1,153.4625 -1,1,2,58,0,1,153.4625 -1,1,1,23,0,1,63.3583 -1,1,2,45,0,1,63.3583 --1,1,1,46,0,0,79.2 -1,1,1,25,1,0,55.4417 -1,1,2,25,1,0,55.4417 -1,1,1,48,1,0,76.7292 -1,1,2,49,1,0,76.7292 --1,1,1,,0,0,42.4 --1,1,1,45,1,0,83.475 -1,1,2,35,1,0,83.475 --1,1,1,40,0,0,0 -1,1,1,27,0,0,76.7292 -1,1,1,,0,0,30 -1,1,2,24,0,0,83.1583 --1,1,1,55,1,1,93.5 -1,1,2,52,1,1,93.5 --1,1,1,42,0,0,42.5 --1,1,1,,0,0,51.8625 --1,1,1,55,0,0,50 -1,1,2,16,0,1,57.9792 -1,1,2,44,0,1,57.9792 -1,1,2,51,1,0,77.9583 --1,1,1,42,1,0,52 -1,1,2,35,1,0,52 -1,1,1,35,0,0,26.55 -1,1,1,38,1,0,90 --1,1,1,,0,0,30.6958 -1,1,2,35,1,0,90 -1,1,2,38,0,0,80 --1,1,2,50,0,0,28.7125 -1,1,1,49,0,0,0 --1,1,1,46,0,0,26 --1,1,1,50,0,0,26 --1,1,1,32.5,0,0,211.5 --1,1,1,58,0,0,29.7 --1,1,1,41,1,0,51.8625 -1,1,2,,1,0,51.8625 -1,1,1,42,1,0,52.5542 -1,1,2,45,1,0,52.5542 --1,1,1,,0,0,26.55 -1,1,2,39,0,0,211.3375 -1,1,2,49,0,0,25.9292 -1,1,2,30,0,0,106.425 -1,1,1,35,0,0,512.3292 --1,1,1,,0,0,27.7208 --1,1,1,42,0,0,26.55 -1,1,2,55,0,0,27.7208 -1,1,2,16,0,1,39.4 
-1,1,2,51,0,1,39.4 --1,1,1,29,0,0,30 -1,1,2,21,0,0,77.9583 --1,1,1,30,0,0,45.5 -1,1,2,58,0,0,146.5208 -1,1,2,15,0,1,211.3375 --1,1,1,30,0,0,26 -1,1,2,16,0,0,86.5 -1,1,1,,0,0,29.7 --1,1,1,19,1,0,53.1 -1,1,2,18,1,0,53.1 -1,1,2,24,0,0,49.5042 --1,1,1,46,0,0,75.2417 --1,1,1,54,0,0,51.8625 -1,1,1,36,0,0,26.2875 --1,1,1,28,1,0,82.1708 -1,1,2,,1,0,82.1708 --1,1,1,65,0,0,26.55 --1,1,1,44,2,0,90 -1,1,2,33,1,0,90 -1,1,2,37,1,0,90 -1,1,1,30,1,0,57.75 --1,1,1,55,0,0,30.5 --1,1,1,47,0,0,42.4 --1,1,1,37,0,1,29.7 -1,1,2,31,1,0,113.275 -1,1,2,23,1,0,113.275 --1,1,1,58,0,2,113.275 -1,1,2,19,0,2,26.2833 --1,1,1,64,0,0,26 -1,1,2,39,0,0,108.9 -1,1,1,,0,0,25.7417 -1,1,2,22,0,1,61.9792 --1,1,1,65,0,1,61.9792 --1,1,1,28.5,0,0,27.7208 --1,1,1,,0,0,0 --1,1,1,45.5,0,0,28.5 --1,1,1,23,0,0,93.5 --1,1,1,29,1,0,66.6 -1,1,2,22,1,0,66.6 --1,1,1,18,1,0,108.9 -1,1,2,17,1,0,108.9 -1,1,2,30,0,0,93.5 -1,1,1,52,0,0,30.5 --1,1,1,47,0,0,52 -1,1,2,56,0,1,83.1583 --1,1,1,38,0,0,0 -1,1,1,,0,0,39.6 --1,1,1,22,0,0,135.6333 --1,1,1,,0,0,227.525 -1,1,2,43,0,1,211.3375 --1,1,1,31,0,0,50.4958 -1,1,1,45,0,0,26.55 --1,1,1,,0,0,50 -1,1,2,33,0,0,27.7208 --1,1,1,46,0,0,79.2 --1,1,1,36,0,0,40.125 -1,1,2,33,0,0,86.5 --1,1,1,55,1,0,59.4 -1,1,2,54,1,0,59.4 --1,1,1,33,0,0,26.55 -1,1,1,13,2,2,262.375 -1,1,2,18,2,2,262.375 -1,1,2,21,2,2,262.375 --1,1,1,61,1,3,262.375 -1,1,2,48,1,3,262.375 -1,1,1,,0,0,30.5 -1,1,2,24,0,0,69.3 -1,1,1,,0,0,26 -1,1,2,35,1,0,57.75 -1,1,2,30,0,0,31 -1,1,1,34,0,0,26.55 -1,1,2,40,0,0,153.4625 -1,1,1,35,0,0,26.2875 --1,1,1,50,1,0,55.9 -1,1,2,39,1,0,55.9 -1,1,1,56,0,0,35.5 -1,1,1,28,0,0,35.5 --1,1,1,56,0,0,26.55 --1,1,1,56,0,0,30.6958 --1,1,1,24,1,0,60 --1,1,1,,0,0,26 -1,1,2,18,1,0,60 -1,1,1,24,1,0,82.2667 -1,1,2,23,1,0,82.2667 -1,1,1,6,0,2,134.5 -1,1,1,45,1,1,134.5 -1,1,2,40,1,1,134.5 --1,1,1,57,1,0,146.5208 -1,1,2,,1,0,146.5208 -1,1,1,32,0,0,30.5 --1,1,1,62,0,0,26.55 -1,1,1,54,1,0,55.4417 -1,1,2,43,1,0,55.4417 -1,1,2,52,1,0,78.2667 --1,1,1,,0,0,27.7208 -1,1,2,62,0,0,80 --1,1,1,67,1,0,221.7792 --1,1,2,63,1,0,221.7792 --1,1,1,61,0,0,32.3208 -1,1,2,48,0,0,25.9292 -1,1,2,18,0,2,79.65 --1,1,1,52,1,1,79.65 -1,1,2,39,1,1,79.65 -1,1,1,48,1,0,52 -1,1,2,,1,0,52 --1,1,1,49,1,1,110.8833 -1,1,1,17,0,2,110.8833 -1,1,2,39,1,1,110.8833 -1,1,2,,0,0,79.2 -1,1,1,31,0,0,28.5375 --1,1,1,40,0,0,27.7208 --1,1,1,61,0,0,33.5 --1,1,1,47,0,0,34.0208 -1,1,2,35,0,0,512.3292 --1,1,1,64,1,0,75.25 -1,1,2,60,1,0,75.25 --1,1,1,60,0,0,26.55 --1,1,1,54,0,1,77.2875 --1,1,1,21,0,1,77.2875 -1,1,2,55,0,0,135.6333 -1,1,2,31,0,2,164.8667 --1,1,1,57,1,1,164.8667 -1,1,2,45,1,1,164.8667 --1,1,1,50,1,1,211.5 --1,1,1,27,0,2,211.5 -1,1,2,50,1,1,211.5 -1,1,2,21,0,0,26.55 --1,1,1,51,0,1,61.3792 -1,1,1,21,0,1,61.3792 --1,1,1,,0,0,35 -1,1,2,31,0,0,134.5 -1,1,1,,0,0,35.5 --1,1,1,62,0,0,26.55 -1,1,2,36,0,0,135.6333 --1,2,1,30,1,0,24 -1,2,2,28,1,0,24 --1,2,1,30,0,0,13 --1,2,1,18,0,0,11.5 --1,2,1,25,0,0,10.5 --1,2,1,34,1,0,26 -1,2,2,36,1,0,26 --1,2,1,57,0,0,13 --1,2,1,18,0,0,11.5 --1,2,1,23,0,0,10.5 -1,2,2,36,0,0,13 --1,2,1,28,0,0,10.5 --1,2,1,51,0,0,12.525 -1,2,1,32,1,0,26 -1,2,2,19,1,0,26 --1,2,1,28,0,0,26 -1,2,1,1,2,1,39 -1,2,2,4,2,1,39 -1,2,2,12,2,1,39 -1,2,2,36,0,3,39 -1,2,1,34,0,0,13 -1,2,2,19,0,0,13 --1,2,1,23,0,0,13 --1,2,1,26,0,0,13 --1,2,1,42,0,0,13 --1,2,1,27,0,0,13 -1,2,2,24,0,0,13 -1,2,2,15,0,2,39 --1,2,1,60,1,1,39 -1,2,2,40,1,1,39 -1,2,2,20,1,0,26 --1,2,1,25,1,0,26 -1,2,2,36,0,0,13 --1,2,1,25,0,0,13 --1,2,1,42,0,0,13 -1,2,2,42,0,0,13 -1,2,1,0.8333,0,2,29 -1,2,1,26,1,1,29 -1,2,2,22,1,1,29 -1,2,2,35,0,0,21 --1,2,1,,0,0,0 --1,2,1,19,0,0,13 --1,2,2,44,1,0,26 
--1,2,1,54,1,0,26 --1,2,1,52,0,0,13.5 --1,2,1,37,1,0,26 --1,2,2,29,1,0,26 -1,2,2,25,1,1,30 -1,2,2,45,0,2,30 --1,2,1,29,1,0,26 -1,2,2,28,1,0,26 --1,2,1,29,0,0,10.5 --1,2,1,28,0,0,13 -1,2,1,24,0,0,10.5 -1,2,2,8,0,2,26.25 --1,2,1,31,1,1,26.25 -1,2,2,31,1,1,26.25 -1,2,2,22,0,0,10.5 --1,2,2,30,0,0,13 --1,2,2,,0,0,21 --1,2,1,21,0,0,11.5 --1,2,1,,0,0,0 -1,2,1,8,1,1,36.75 --1,2,1,18,0,0,73.5 -1,2,2,48,0,2,36.75 -1,2,2,28,0,0,13 --1,2,1,32,0,0,13 --1,2,1,17,0,0,73.5 --1,2,1,29,1,0,27.7208 -1,2,2,24,1,0,27.7208 --1,2,1,25,0,0,31.5 --1,2,1,18,0,0,73.5 -1,2,2,18,0,1,23 -1,2,2,34,0,1,23 --1,2,1,54,0,0,26 -1,2,1,8,0,2,32.5 --1,2,1,42,1,1,32.5 -1,2,2,34,1,1,32.5 -1,2,2,27,1,0,13.8583 -1,2,2,30,1,0,13.8583 --1,2,1,23,0,0,13 --1,2,1,21,0,0,13 --1,2,1,18,0,0,13 --1,2,1,40,1,0,26 -1,2,2,29,1,0,26 --1,2,1,18,0,0,10.5 --1,2,1,36,0,0,13 --1,2,1,,0,0,0 --1,2,2,38,0,0,13 --1,2,1,35,0,0,26 --1,2,1,38,1,0,21 --1,2,1,34,1,0,21 -1,2,2,34,0,0,13 --1,2,1,16,0,0,26 --1,2,1,26,0,0,10.5 --1,2,1,47,0,0,10.5 --1,2,1,21,1,0,11.5 --1,2,1,21,1,0,11.5 --1,2,1,24,0,0,13.5 --1,2,1,24,0,0,13 --1,2,1,34,0,0,13 --1,2,1,30,0,0,13 --1,2,1,52,0,0,13 --1,2,1,30,0,0,13 -1,2,1,0.6667,1,1,14.5 -1,2,2,24,0,2,14.5 --1,2,1,44,0,0,13 -1,2,2,6,0,1,33 --1,2,1,28,0,1,33 -1,2,1,62,0,0,10.5 --1,2,1,30,0,0,10.5 -1,2,2,7,0,2,26.25 --1,2,1,43,1,1,26.25 -1,2,2,45,1,1,26.25 -1,2,2,24,1,2,65 -1,2,2,24,1,2,65 --1,2,1,49,1,2,65 -1,2,2,48,1,2,65 -1,2,2,55,0,0,16 --1,2,1,24,2,0,73.5 --1,2,1,32,2,0,73.5 --1,2,1,21,2,0,73.5 --1,2,2,18,1,1,13 -1,2,2,20,2,1,23 --1,2,1,23,2,1,11.5 --1,2,1,36,0,0,13 -1,2,2,54,1,3,23 --1,2,1,50,0,0,13 --1,2,1,44,1,0,26 -1,2,2,29,1,0,26 --1,2,1,21,0,0,73.5 -1,2,1,42,0,0,13 --1,2,1,63,1,0,26 --1,2,2,60,1,0,26 --1,2,1,33,0,0,12.275 -1,2,2,17,0,0,10.5 --1,2,1,42,1,0,27 -1,2,2,24,2,1,27 --1,2,1,47,0,0,15 --1,2,1,24,2,0,31.5 --1,2,1,22,2,0,31.5 --1,2,1,32,0,0,10.5 -1,2,2,23,0,0,13.7917 --1,2,1,34,1,0,26 -1,2,2,24,1,0,26 --1,2,2,22,0,0,21 -1,2,2,,0,0,12.35 --1,2,1,35,0,0,12.35 -1,2,2,45,0,0,13.5 --1,2,1,57,0,0,12.35 --1,2,1,,0,0,0 --1,2,1,31,0,0,10.5 --1,2,2,26,1,1,26 --1,2,1,30,1,1,26 --1,2,1,,0,0,10.7083 -1,2,2,1,1,2,41.5792 -1,2,2,3,1,2,41.5792 --1,2,1,25,1,2,41.5792 -1,2,2,22,1,2,41.5792 -1,2,2,17,0,0,12 -1,2,2,,0,0,33 -1,2,2,34,0,0,10.5 --1,2,1,36,0,0,12.875 --1,2,1,24,0,0,10.5 --1,2,1,61,0,0,12.35 --1,2,1,50,1,0,26 -1,2,2,42,1,0,26 --1,2,2,57,0,0,10.5 --1,2,1,,0,0,15.0458 -1,2,1,1,0,2,37.0042 --1,2,1,31,1,1,37.0042 -1,2,2,24,1,1,37.0042 --1,2,1,,0,0,15.5792 --1,2,1,30,0,0,13 --1,2,1,40,0,0,16 --1,2,1,32,0,0,13.5 --1,2,1,30,0,0,13 --1,2,1,46,0,0,26 -1,2,2,13,0,1,19.5 -1,2,2,41,0,1,19.5 -1,2,1,19,0,0,10.5 --1,2,1,39,0,0,13 --1,2,1,48,0,0,13 --1,2,1,70,0,0,10.5 --1,2,1,27,0,0,13 --1,2,1,54,0,0,14 --1,2,1,39,0,0,26 --1,2,1,16,0,0,10.5 --1,2,1,62,0,0,9.6875 --1,2,1,32.5,1,0,30.0708 -1,2,2,14,1,0,30.0708 -1,2,1,2,1,1,26 -1,2,1,3,1,1,26 --1,2,1,36.5,0,2,26 --1,2,1,26,0,0,13 --1,2,1,19,1,1,36.75 --1,2,1,28,0,0,13.5 -1,2,1,20,0,0,13.8625 -1,2,2,29,0,0,10.5 --1,2,1,39,0,0,13 -1,2,1,22,0,0,10.5 -1,2,1,,0,0,13.8625 --1,2,1,23,0,0,10.5 -1,2,1,29,0,0,13.8583 --1,2,1,28,0,0,10.5 --1,2,1,,0,0,0 -1,2,2,50,0,1,26 --1,2,1,19,0,0,10.5 --1,2,1,,0,0,15.05 --1,2,1,41,0,0,13 -1,2,2,21,0,1,21 -1,2,2,19,0,0,26 --1,2,1,43,0,1,21 -1,2,2,32,0,0,13 --1,2,1,34,0,0,13 -1,2,1,30,0,0,12.7375 --1,2,1,27,0,0,15.0333 -1,2,2,2,1,1,26 -1,2,2,8,1,1,26 -1,2,2,33,0,2,26 --1,2,1,36,0,0,10.5 --1,2,1,34,1,0,21 -1,2,2,30,3,0,21 -1,2,2,28,0,0,13 --1,2,1,23,0,0,15.0458 -1,2,1,0.8333,1,1,18.75 -1,2,1,3,1,1,18.75 -1,2,2,24,2,3,18.75 -1,2,2,50,0,0,10.5 --1,2,1,19,0,0,10.5 
-1,2,2,21,0,0,10.5 --1,2,1,26,0,0,13 --1,2,1,25,0,0,13 --1,2,1,27,0,0,26 -1,2,2,25,0,1,26 -1,2,2,18,0,2,13 -1,2,2,20,0,0,36.75 -1,2,2,30,0,0,13 --1,2,1,59,0,0,13.5 -1,2,2,30,0,0,12.35 --1,2,1,35,0,0,10.5 -1,2,2,40,0,0,13 --1,2,1,25,0,0,13 --1,2,1,41,0,0,15.0458 --1,2,1,25,0,0,10.5 --1,2,1,18.5,0,0,13 --1,2,1,14,0,0,65 -1,2,2,50,0,0,10.5 --1,2,1,23,0,0,13 -1,2,2,28,0,0,12.65 -1,2,2,27,0,0,10.5 --1,2,1,29,1,0,21 --1,2,2,27,1,0,21 --1,2,1,40,0,0,13 -1,2,2,31,0,0,21 --1,2,1,30,1,0,21 --1,2,1,23,1,0,10.5 -1,2,2,31,0,0,21 --1,2,1,,0,0,0 -1,2,2,12,0,0,15.75 -1,2,2,40,0,0,15.75 -1,2,2,32.5,0,0,13 --1,2,1,27,1,0,26 -1,2,2,29,1,0,26 -1,2,1,2,1,1,23 -1,2,2,4,1,1,23 -1,2,2,29,0,2,23 -1,2,2,0.9167,1,2,27.75 -1,2,2,5,1,2,27.75 --1,2,1,36,1,2,27.75 -1,2,2,33,1,2,27.75 --1,2,1,66,0,0,10.5 --1,2,1,,0,0,12.875 -1,2,1,31,0,0,13 -1,2,1,,0,0,13 -1,2,2,26,0,0,13.5 --1,2,2,24,0,0,13 --1,3,1,42,0,0,7.55 --1,3,1,13,0,2,20.25 --1,3,1,16,1,1,20.25 -1,3,2,35,1,1,20.25 -1,3,2,16,0,0,7.65 -1,3,1,25,0,0,7.65 -1,3,1,20,0,0,7.925 -1,3,2,18,0,0,7.2292 --1,3,1,30,0,0,7.25 --1,3,1,26,0,0,8.05 --1,3,2,40,1,0,9.475 -1,3,1,0.8333,0,1,9.35 -1,3,2,18,0,1,9.35 -1,3,1,26,0,0,18.7875 --1,3,1,26,0,0,7.8875 --1,3,1,20,0,0,7.925 --1,3,1,24,0,0,7.05 --1,3,1,25,0,0,7.05 --1,3,1,35,0,0,8.05 --1,3,1,18,0,0,8.3 --1,3,1,32,0,0,22.525 -1,3,2,19,1,0,7.8542 --1,3,1,4,4,2,31.275 --1,3,2,6,4,2,31.275 --1,3,2,2,4,2,31.275 -1,3,2,17,4,2,7.925 --1,3,2,38,4,2,7.775 --1,3,2,9,4,2,31.275 --1,3,2,11,4,2,31.275 --1,3,1,39,1,5,31.275 -1,3,1,27,0,0,7.7958 --1,3,1,26,0,0,7.775 --1,3,2,39,1,5,31.275 --1,3,1,20,0,0,7.8542 --1,3,1,26,0,0,7.8958 --1,3,1,25,1,0,17.8 --1,3,2,18,1,0,17.8 --1,3,1,24,0,0,7.775 --1,3,1,35,0,0,7.05 --1,3,1,5,4,2,31.3875 --1,3,1,9,4,2,31.3875 -1,3,1,3,4,2,31.3875 --1,3,1,13,4,2,31.3875 -1,3,2,5,4,2,31.3875 --1,3,1,40,1,5,31.3875 -1,3,1,23,0,0,7.7958 -1,3,2,38,1,5,31.3875 -1,3,2,45,0,0,7.225 --1,3,1,21,0,0,7.225 --1,3,1,23,0,0,7.05 --1,3,2,17,0,0,14.4583 --1,3,1,30,0,0,7.225 --1,3,1,23,0,0,7.8542 -1,3,2,13,0,0,7.2292 --1,3,1,20,0,0,7.225 --1,3,1,32,1,0,15.85 -1,3,2,33,3,0,15.85 -1,3,2,0.75,2,1,19.2583 -1,3,2,0.75,2,1,19.2583 -1,3,2,5,2,1,19.2583 -1,3,2,24,0,3,19.2583 -1,3,2,18,0,0,8.05 --1,3,1,40,0,0,7.225 --1,3,1,26,0,0,7.8958 -1,3,1,20,0,0,7.2292 --1,3,2,18,0,1,14.4542 --1,3,2,45,0,1,14.4542 --1,3,2,27,0,0,7.8792 --1,3,1,22,0,0,8.05 --1,3,1,19,0,0,8.05 --1,3,1,26,0,0,7.775 --1,3,1,22,0,0,9.35 --1,3,1,,0,0,7.2292 --1,3,1,20,0,0,4.0125 -1,3,1,32,0,0,56.4958 --1,3,1,21,0,0,7.775 --1,3,1,18,0,0,7.75 --1,3,1,26,0,0,7.8958 --1,3,1,6,1,1,15.2458 --1,3,2,9,1,1,15.2458 --1,3,1,,0,0,7.225 --1,3,2,,0,2,15.2458 --1,3,2,,0,2,7.75 --1,3,1,40,1,1,15.5 --1,3,2,32,1,1,15.5 --1,3,1,21,0,0,16.1 -1,3,2,22,0,0,7.725 --1,3,2,20,0,0,7.8542 --1,3,1,29,1,0,7.0458 --1,3,1,22,1,0,7.25 --1,3,1,22,0,0,7.7958 --1,3,1,35,0,0,8.05 --1,3,2,18.5,0,0,7.2833 -1,3,1,21,0,0,7.8208 --1,3,1,19,0,0,6.75 --1,3,2,18,0,0,7.8792 --1,3,2,21,0,0,8.6625 --1,3,2,30,0,0,8.6625 --1,3,1,18,0,0,8.6625 --1,3,1,38,0,0,8.6625 --1,3,1,17,0,0,8.6625 --1,3,1,17,0,0,8.6625 --1,3,2,21,0,0,7.75 --1,3,1,21,0,0,7.75 --1,3,1,21,0,0,8.05 --1,3,1,,1,0,14.4583 --1,3,2,,1,0,14.4583 --1,3,1,28,0,0,7.7958 --1,3,1,24,0,0,7.8542 -1,3,2,16,0,0,7.75 --1,3,2,37,0,0,7.75 --1,3,1,28,0,0,7.25 --1,3,1,24,0,0,8.05 --1,3,1,21,0,0,7.7333 -1,3,1,32,0,0,56.4958 --1,3,1,29,0,0,8.05 --1,3,1,26,1,0,14.4542 --1,3,1,18,1,0,14.4542 --1,3,1,20,0,0,7.05 -1,3,1,18,0,0,8.05 --1,3,1,24,0,0,7.25 --1,3,1,36,0,0,7.4958 --1,3,1,24,0,0,7.4958 --1,3,1,31,0,0,7.7333 --1,3,1,31,0,0,7.75 -1,3,2,22,0,0,7.75 --1,3,2,30,0,0,7.6292 
--1,3,1,70.5,0,0,7.75 --1,3,1,43,0,0,8.05 --1,3,1,35,0,0,7.8958 --1,3,1,27,0,0,7.8958 --1,3,1,19,0,0,7.8958 --1,3,1,30,0,0,8.05 -1,3,1,9,1,1,15.9 -1,3,1,3,1,1,15.9 -1,3,2,36,0,2,15.9 --1,3,1,59,0,0,7.25 --1,3,1,19,0,0,8.1583 -1,3,2,17,0,1,16.1 --1,3,1,44,0,1,16.1 --1,3,1,17,0,0,8.6625 --1,3,1,22.5,0,0,7.225 -1,3,1,45,0,0,8.05 --1,3,2,22,0,0,10.5167 --1,3,1,19,0,0,10.1708 -1,3,2,30,0,0,6.95 -1,3,1,29,0,0,7.75 --1,3,1,0.3333,0,2,14.4 --1,3,1,34,1,1,14.4 --1,3,2,28,1,1,14.4 --1,3,1,27,0,0,7.8958 --1,3,1,25,0,0,7.8958 --1,3,1,24,2,0,24.15 --1,3,1,22,0,0,8.05 --1,3,1,21,2,0,24.15 --1,3,1,17,2,0,8.05 --1,3,1,,1,0,16.1 -1,3,2,,1,0,16.1 -1,3,1,36.5,1,0,17.4 -1,3,2,36,1,0,17.4 -1,3,1,30,0,0,9.5 --1,3,1,16,0,0,9.5 -1,3,1,1,1,2,20.575 -1,3,2,0.1667,1,2,20.575 --1,3,1,26,1,2,20.575 -1,3,2,33,1,2,20.575 --1,3,1,25,0,0,7.8958 --1,3,1,,0,0,7.8958 --1,3,1,,0,0,7.8958 --1,3,1,22,0,0,7.25 --1,3,1,36,0,0,7.25 -1,3,2,19,0,0,7.8792 --1,3,1,17,0,0,7.8958 --1,3,1,42,0,0,8.6625 --1,3,1,43,0,0,7.8958 --1,3,1,,0,0,7.2292 --1,3,1,32,0,0,7.75 -1,3,1,19,0,0,8.05 -1,3,2,30,0,0,12.475 --1,3,2,24,0,0,7.75 -1,3,2,23,0,0,8.05 --1,3,1,33,0,0,7.8958 --1,3,1,65,0,0,7.75 -1,3,1,24,0,0,7.55 --1,3,1,23,1,0,13.9 -1,3,2,22,1,0,13.9 --1,3,1,18,0,0,7.775 --1,3,1,16,0,0,7.775 --1,3,1,45,0,0,6.975 --1,3,1,,0,0,7.225 --1,3,1,39,0,2,7.2292 --1,3,1,17,1,1,7.2292 --1,3,1,15,1,1,7.2292 --1,3,1,47,0,0,7.25 -1,3,2,5,0,0,12.475 --1,3,1,,0,0,7.225 --1,3,1,40.5,0,0,15.1 --1,3,1,40.5,0,0,7.75 -1,3,1,,0,0,7.05 --1,3,1,18,0,0,7.7958 --1,3,2,,0,0,7.75 --1,3,1,,0,0,7.75 --1,3,1,,0,0,6.95 --1,3,1,26,0,0,7.8792 --1,3,1,,0,0,7.75 -1,3,1,,0,0,56.4958 --1,3,2,21,2,2,34.375 --1,3,2,9,2,2,34.375 --1,3,1,,0,0,8.05 --1,3,1,18,2,2,34.375 --1,3,1,16,1,3,34.375 --1,3,2,48,1,3,34.375 --1,3,1,,0,0,7.75 --1,3,1,,0,0,7.25 --1,3,1,25,0,0,7.7417 --1,3,1,,0,0,14.5 --1,3,1,,0,0,7.8958 --1,3,1,22,0,0,8.05 -1,3,2,16,0,0,7.7333 -1,3,2,,0,0,7.75 -1,3,1,9,0,2,20.525 --1,3,1,33,1,1,20.525 --1,3,1,41,0,0,7.85 -1,3,2,31,1,1,20.525 --1,3,1,38,0,0,7.05 --1,3,1,9,5,2,46.9 --1,3,1,1,5,2,46.9 --1,3,1,11,5,2,46.9 --1,3,2,10,5,2,46.9 --1,3,2,16,5,2,46.9 --1,3,1,14,5,2,46.9 --1,3,1,40,1,6,46.9 --1,3,2,43,1,6,46.9 --1,3,1,51,0,0,8.05 --1,3,1,32,0,0,8.3625 --1,3,1,,0,0,8.05 --1,3,1,20,0,0,9.8458 --1,3,1,37,2,0,7.925 --1,3,1,28,2,0,7.925 --1,3,1,19,0,0,7.775 --1,3,2,24,0,0,8.85 --1,3,2,17,0,0,7.7333 --1,3,1,,1,0,19.9667 --1,3,1,,1,0,19.9667 --1,3,1,28,1,0,15.85 -1,3,2,24,1,0,15.85 --1,3,1,20,0,0,9.5 --1,3,1,23.5,0,0,7.2292 --1,3,1,41,2,0,14.1083 --1,3,1,26,1,0,7.8542 --1,3,1,21,0,0,7.8542 -1,3,2,45,1,0,14.1083 --1,3,2,,0,0,7.55 --1,3,1,25,0,0,7.25 --1,3,1,,0,0,6.8583 --1,3,1,11,0,0,18.7875 -1,3,2,,0,0,7.75 -1,3,1,27,0,0,6.975 -1,3,1,,0,0,56.4958 --1,3,2,18,0,0,6.75 -1,3,2,26,0,0,7.925 --1,3,2,23,0,0,7.925 -1,3,2,22,0,0,8.9625 --1,3,1,28,0,0,7.8958 --1,3,2,28,0,0,7.775 --1,3,2,,0,0,7.75 -1,3,2,2,0,1,12.2875 -1,3,2,22,1,1,12.2875 --1,3,1,43,0,0,6.45 --1,3,1,28,0,0,22.525 -1,3,2,27,0,0,7.925 --1,3,1,,0,0,7.75 -1,3,2,,0,0,8.05 --1,3,1,42,0,0,7.65 -1,3,1,,0,0,7.8875 --1,3,1,30,0,0,7.2292 --1,3,1,,0,0,7.8958 --1,3,2,27,1,0,7.925 --1,3,2,25,1,0,7.925 --1,3,1,,0,0,7.8958 -1,3,1,29,0,0,7.8958 -1,3,1,21,0,0,7.7958 --1,3,1,,0,0,7.05 --1,3,1,20,0,0,7.8542 --1,3,1,48,0,0,7.8542 --1,3,1,17,1,0,7.0542 -1,3,2,,0,0,7.75 -1,3,1,,0,0,8.1125 --1,3,1,34,0,0,6.4958 -1,3,1,26,0,0,7.775 --1,3,1,22,0,0,7.7958 --1,3,1,33,0,0,8.6542 --1,3,1,31,0,0,7.775 --1,3,1,29,0,0,7.8542 -1,3,1,4,1,1,11.1333 -1,3,2,1,1,1,11.1333 --1,3,1,49,0,0,0 --1,3,1,33,0,0,7.775 --1,3,1,19,0,0,0 -1,3,2,27,0,2,11.1333 --1,3,1,,1,2,23.45 
--1,3,2,,1,2,23.45 --1,3,1,,1,2,23.45 --1,3,2,,1,2,23.45 --1,3,1,23,0,0,7.8958 -1,3,1,32,0,0,7.8542 --1,3,1,27,0,0,7.8542 --1,3,2,20,1,0,9.825 --1,3,2,21,1,0,9.825 -1,3,1,32,0,0,7.925 --1,3,1,17,0,0,7.125 --1,3,1,21,0,0,8.4333 --1,3,1,30,0,0,7.8958 -1,3,1,21,0,0,7.7958 --1,3,1,33,0,0,7.8542 --1,3,1,22,0,0,7.5208 -1,3,2,4,0,1,13.4167 -1,3,1,39,0,1,13.4167 --1,3,1,,0,0,7.2292 --1,3,1,18.5,0,0,7.2292 --1,3,1,,0,0,7.75 --1,3,1,,0,0,7.25 -1,3,2,,0,0,7.75 -1,3,2,,0,0,7.75 --1,3,1,34.5,0,0,7.8292 --1,3,1,44,0,0,8.05 -1,3,1,,0,0,7.75 --1,3,1,,1,0,14.4542 --1,3,2,,1,0,14.4542 --1,3,1,,1,0,7.75 --1,3,1,,1,0,7.75 --1,3,1,,0,0,7.7375 --1,3,2,22,2,0,8.6625 --1,3,1,26,2,0,8.6625 -1,3,2,4,0,2,22.025 -1,3,1,29,3,1,22.025 -1,3,2,26,1,1,22.025 --1,3,2,1,1,1,12.1833 --1,3,1,18,1,1,7.8542 --1,3,2,36,0,2,12.1833 --1,3,1,,0,0,7.8958 -1,3,1,25,0,0,7.2292 --1,3,1,,0,0,7.225 --1,3,2,37,0,0,9.5875 --1,3,1,,0,0,7.8958 -1,3,1,,0,0,56.4958 --1,3,1,,0,0,56.4958 -1,3,2,22,0,0,7.25 --1,3,1,,0,0,7.75 -1,3,1,26,0,0,56.4958 --1,3,1,29,0,0,9.4833 --1,3,1,29,0,0,7.775 --1,3,1,22,0,0,7.775 -1,3,1,22,0,0,7.225 --1,3,1,,3,1,25.4667 --1,3,2,,3,1,25.4667 --1,3,2,,3,1,25.4667 --1,3,2,,3,1,25.4667 --1,3,2,,0,4,25.4667 --1,3,1,32,0,0,7.925 --1,3,1,34.5,0,0,6.4375 --1,3,2,,1,0,15.5 --1,3,1,,1,0,15.5 --1,3,1,36,0,0,0 --1,3,1,39,0,0,24.15 --1,3,1,24,0,0,9.5 --1,3,2,25,0,0,7.775 --1,3,2,45,0,0,7.75 --1,3,1,36,1,0,15.55 --1,3,2,30,1,0,15.55 -1,3,1,20,1,0,7.925 --1,3,1,,0,0,7.8792 --1,3,1,28,0,0,56.4958 --1,3,1,,0,0,7.55 --1,3,1,30,1,0,16.1 --1,3,2,26,1,0,16.1 --1,3,1,,0,0,7.8792 --1,3,1,20.5,0,0,7.25 -1,3,1,27,0,0,8.6625 --1,3,1,51,0,0,7.0542 -1,3,2,23,0,0,7.8542 -1,3,1,32,0,0,7.5792 --1,3,1,,0,0,7.8958 --1,3,1,,0,0,7.55 -1,3,2,,0,0,7.75 -1,3,1,24,0,0,7.1417 --1,3,1,22,0,0,7.125 --1,3,2,,0,0,7.8792 --1,3,1,,0,0,7.75 --1,3,1,,0,0,8.05 --1,3,1,29,0,0,7.925 -1,3,1,,0,0,7.2292 --1,3,2,30.5,0,0,7.75 -1,3,2,,0,0,7.7375 --1,3,1,,0,0,7.2292 --1,3,1,35,0,0,7.8958 --1,3,1,33,0,0,7.8958 -1,3,2,,0,0,7.225 --1,3,1,,0,0,7.8958 -1,3,2,,0,0,7.75 -1,3,1,,0,0,7.75 -1,3,2,,2,0,23.25 -1,3,2,,2,0,23.25 -1,3,1,,2,0,23.25 -1,3,2,,0,0,7.7875 --1,3,1,,0,0,15.5 -1,3,2,,0,0,7.8792 -1,3,2,15,0,0,8.0292 --1,3,2,35,0,0,7.75 --1,3,1,,0,0,7.75 --1,3,1,24,1,0,16.1 --1,3,2,19,1,0,16.1 --1,3,2,,0,0,7.75 --1,3,2,,0,0,8.05 --1,3,2,,0,0,8.05 --1,3,1,55.5,0,0,8.05 --1,3,1,,0,0,7.75 -1,3,1,21,0,0,7.775 --1,3,1,,0,0,8.05 --1,3,1,24,0,0,7.8958 --1,3,1,21,0,0,7.8958 --1,3,1,28,0,0,7.8958 --1,3,1,,0,0,7.8958 -1,3,2,,0,0,7.8792 --1,3,1,25,0,0,7.65 -1,3,1,6,0,1,12.475 -1,3,2,27,0,1,12.475 --1,3,1,,0,0,8.05 -1,3,2,,1,0,24.15 --1,3,1,,1,0,24.15 --1,3,1,,0,0,8.4583 --1,3,1,34,0,0,8.05 --1,3,1,,0,0,7.75 -1,3,1,,0,0,7.775 -1,3,1,,1,1,15.2458 -1,3,1,,1,1,15.2458 -1,3,2,,0,2,15.2458 -1,3,2,,0,0,7.2292 --1,3,1,,0,0,8.05 -1,3,2,,0,0,7.7333 -1,3,2,24,0,0,7.75 --1,3,1,,0,0,8.05 -1,3,2,,1,0,15.5 -1,3,2,,1,0,15.5 -1,3,2,,0,0,15.5 --1,3,1,18,0,0,7.75 --1,3,1,22,0,0,7.8958 -1,3,2,15,0,0,7.225 -1,3,2,1,0,2,15.7417 -1,3,1,20,1,1,15.7417 -1,3,2,19,1,1,15.7417 --1,3,1,33,0,0,8.05 --1,3,1,,0,0,7.8958 --1,3,1,,0,0,7.2292 --1,3,2,,0,0,7.75 --1,3,1,,0,0,7.8958 -1,3,1,12,1,0,11.2417 -1,3,2,14,1,0,11.2417 --1,3,2,29,0,0,7.925 --1,3,1,28,0,0,8.05 -1,3,2,18,0,0,7.775 -1,3,2,26,0,0,7.8542 --1,3,1,21,0,0,7.8542 --1,3,1,41,0,0,7.125 -1,3,1,39,0,0,7.925 --1,3,1,21,0,0,7.8 --1,3,1,28.5,0,0,7.2292 -1,3,2,22,0,0,7.75 --1,3,1,61,0,0,6.2375 --1,3,1,,1,0,15.5 --1,3,1,,0,0,7.8292 -1,3,2,,1,0,15.5 --1,3,1,,0,0,7.7333 --1,3,1,,0,0,7.75 --1,3,1,,0,0,7.75 --1,3,1,23,0,0,9.225 --1,3,2,,0,0,7.75 -1,3,2,,0,0,7.75 
-1,3,2,,0,0,7.8792 -1,3,2,22,0,0,7.775 -1,3,1,,0,0,7.75 -1,3,2,,0,0,7.8292 -1,3,1,9,0,1,3.1708 --1,3,1,28,0,0,22.525 --1,3,1,42,0,1,8.4042 --1,3,1,,0,0,7.3125 --1,3,2,31,0,0,7.8542 --1,3,1,28,0,0,7.8542 -1,3,1,32,0,0,7.775 --1,3,1,20,0,0,9.225 --1,3,2,23,0,0,8.6625 --1,3,2,20,0,0,8.6625 --1,3,1,20,0,0,8.6625 --1,3,1,16,0,0,9.2167 -1,3,2,31,0,0,8.6833 --1,3,2,,0,0,7.6292 --1,3,1,2,3,1,21.075 --1,3,1,6,3,1,21.075 --1,3,2,3,3,1,21.075 --1,3,2,8,3,1,21.075 --1,3,2,29,0,4,21.075 --1,3,1,1,4,1,39.6875 --1,3,1,7,4,1,39.6875 --1,3,1,2,4,1,39.6875 --1,3,1,16,4,1,39.6875 --1,3,1,14,4,1,39.6875 --1,3,2,41,0,5,39.6875 --1,3,1,21,0,0,8.6625 --1,3,1,19,0,0,14.5 --1,3,1,,0,0,8.7125 --1,3,1,32,0,0,7.8958 --1,3,1,0.75,1,1,13.775 --1,3,2,3,1,1,13.775 --1,3,2,26,0,2,13.775 --1,3,1,,0,0,7 --1,3,1,,0,0,7.775 --1,3,1,,0,0,8.05 --1,3,1,21,0,0,7.925 --1,3,1,25,0,0,7.925 --1,3,1,22,0,0,7.25 -1,3,1,25,1,0,7.775 -1,3,1,,1,1,22.3583 -1,3,2,,1,1,22.3583 -1,3,2,,0,2,22.3583 --1,3,2,,0,0,8.1375 --1,3,1,24,0,0,8.05 --1,3,2,28,0,0,7.8958 --1,3,1,19,0,0,7.8958 --1,3,1,,0,0,7.8958 --1,3,1,25,1,0,7.775 --1,3,2,18,0,0,7.775 -1,3,1,32,0,0,8.05 --1,3,1,,0,0,7.8958 --1,3,1,17,0,0,8.6625 --1,3,1,24,0,0,8.6625 --1,3,1,,0,0,7.8958 --1,3,2,,0,0,8.1125 --1,3,1,,0,0,7.2292 --1,3,1,,0,0,7.25 --1,3,1,38,0,0,7.8958 --1,3,1,21,0,0,8.05 --1,3,1,10,4,1,29.125 --1,3,1,4,4,1,29.125 --1,3,1,7,4,1,29.125 --1,3,1,2,4,1,29.125 --1,3,1,8,4,1,29.125 --1,3,2,39,0,5,29.125 --1,3,2,22,0,0,39.6875 --1,3,1,35,0,0,7.125 -1,3,2,,0,0,7.7208 --1,3,1,,0,0,14.5 --1,3,2,,0,0,14.5 --1,3,1,50,1,0,14.5 --1,3,2,47,1,0,14.5 --1,3,1,,0,0,8.05 --1,3,1,,0,0,7.775 --1,3,2,2,1,1,20.2125 --1,3,1,18,1,1,20.2125 --1,3,2,41,0,2,20.2125 -1,3,2,,0,0,8.05 --1,3,1,50,0,0,8.05 --1,3,1,16,0,0,8.05 -1,3,1,,0,0,7.75 --1,3,1,,0,0,24.15 --1,3,1,,0,0,7.2292 --1,3,1,25,0,0,7.225 --1,3,1,,0,0,7.225 --1,3,1,,0,0,7.7292 --1,3,1,,0,0,7.575 --1,3,1,38.5,0,0,7.25 --1,3,1,,8,2,69.55 --1,3,1,14.5,8,2,69.55 --1,3,2,,8,2,69.55 --1,3,2,,8,2,69.55 --1,3,2,,8,2,69.55 --1,3,2,,8,2,69.55 --1,3,1,,8,2,69.55 --1,3,1,,8,2,69.55 --1,3,1,,8,2,69.55 --1,3,1,,1,9,69.55 --1,3,2,,1,9,69.55 --1,3,1,24,0,0,9.325 -1,3,2,21,0,0,7.65 --1,3,1,39,0,0,7.925 --1,3,1,,2,0,21.6792 --1,3,1,,2,0,21.6792 --1,3,1,,2,0,21.6792 -1,3,2,1,1,1,16.7 -1,3,2,24,0,2,16.7 -1,3,2,4,1,1,16.7 -1,3,1,25,0,0,9.5 --1,3,1,20,0,0,8.05 --1,3,1,24.5,0,0,8.05 --1,3,1,,0,0,7.725 --1,3,1,,0,0,7.8958 --1,3,1,,0,0,7.75 -1,3,1,29,0,0,9.5 --1,3,1,,0,0,15.1 -1,3,2,,0,0,7.7792 --1,3,1,,0,0,8.05 --1,3,1,,0,0,8.05 --1,3,1,22,0,0,7.2292 --1,3,1,,0,0,8.05 --1,3,1,40,0,0,7.8958 --1,3,1,21,0,0,7.925 -1,3,2,18,0,0,7.4958 --1,3,1,4,3,2,27.9 --1,3,1,10,3,2,27.9 --1,3,2,9,3,2,27.9 --1,3,2,2,3,2,27.9 --1,3,1,40,1,4,27.9 --1,3,2,45,1,4,27.9 --1,3,1,,0,0,7.8958 --1,3,1,,0,0,8.05 --1,3,1,,0,0,8.6625 --1,3,1,,0,0,7.75 -1,3,2,,0,0,7.7333 --1,3,1,19,0,0,7.65 --1,3,1,30,0,0,8.05 --1,3,1,,0,0,8.05 --1,3,1,32,0,0,8.05 --1,3,1,,0,0,7.8958 --1,3,1,33,0,0,8.6625 -1,3,2,23,0,0,7.55 --1,3,1,21,0,0,8.05 --1,3,1,60.5,0,0,0 --1,3,1,19,0,0,7.8958 --1,3,2,22,0,0,9.8375 -1,3,1,31,0,0,7.925 --1,3,1,27,0,0,8.6625 --1,3,2,2,0,1,10.4625 --1,3,2,29,1,1,10.4625 -1,3,1,16,0,0,8.05 -1,3,1,44,0,0,7.925 --1,3,1,25,0,0,7.05 --1,3,1,74,0,0,7.775 -1,3,1,14,0,0,9.225 --1,3,1,24,0,0,7.7958 -1,3,1,25,0,0,7.7958 --1,3,1,34,0,0,8.05 -1,3,1,0.4167,0,1,8.5167 --1,3,1,,1,0,6.4375 --1,3,1,,0,0,6.4375 --1,3,1,,0,0,7.225 -1,3,2,16,1,1,8.5167 --1,3,1,,0,0,8.05 --1,3,1,,1,0,16.1 -1,3,2,,1,0,16.1 --1,3,1,32,0,0,7.925 --1,3,1,,0,0,7.75 --1,3,1,,0,0,7.8958 --1,3,1,30.5,0,0,8.05 --1,3,1,44,0,0,8.05 
--1,3,1,,0,0,7.2292 -1,3,1,25,0,0,0 --1,3,1,,0,0,7.2292 -1,3,1,7,1,1,15.2458 -1,3,2,9,1,1,15.2458 -1,3,2,29,0,2,15.2458 --1,3,1,36,0,0,7.8958 -1,3,2,18,0,0,9.8417 -1,3,2,63,0,0,9.5875 --1,3,1,,1,1,14.5 --1,3,1,11.5,1,1,14.5 --1,3,1,40.5,0,2,14.5 --1,3,2,10,0,2,24.15 --1,3,1,36,1,1,24.15 --1,3,2,30,1,1,24.15 --1,3,1,,0,0,9.5 --1,3,1,33,0,0,9.5 --1,3,1,28,0,0,9.5 --1,3,1,28,0,0,9.5 --1,3,1,47,0,0,9 --1,3,2,18,2,0,18 --1,3,1,31,3,0,18 --1,3,1,16,2,0,18 --1,3,2,31,1,0,18 -1,3,1,22,0,0,7.225 --1,3,1,20,0,0,7.8542 --1,3,2,14,0,0,7.8542 --1,3,1,22,0,0,7.8958 --1,3,1,22,0,0,9 --1,3,1,,0,0,8.05 --1,3,1,,0,0,7.55 --1,3,1,,0,0,8.05 --1,3,1,32.5,0,0,9.5 -1,3,2,38,0,0,7.2292 --1,3,1,51,0,0,7.75 --1,3,1,18,1,0,6.4958 --1,3,1,21,1,0,6.4958 -1,3,2,47,1,0,7 --1,3,1,,0,0,8.7125 --1,3,1,,0,0,7.55 --1,3,1,,0,0,8.05 --1,3,1,28.5,0,0,16.1 --1,3,1,21,0,0,7.25 --1,3,1,27,0,0,8.6625 --1,3,1,,0,0,7.25 --1,3,1,36,0,0,9.5 --1,3,1,27,1,0,14.4542 -1,3,2,15,1,0,14.4542 --1,3,1,45.5,0,0,7.225 --1,3,1,,0,0,7.225 --1,3,1,,0,0,14.4583 --1,3,2,14.5,1,0,14.4542 --1,3,2,,1,0,14.4542 --1,3,1,26.5,0,0,7.225 --1,3,1,27,0,0,7.225 --1,3,1,29,0,0,7.875 http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java b/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java index 89ff1b4..cbed297 100644 --- a/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java +++ b/modules/ml/src/main/java/org/apache/ignite/ml/structures/Dataset.java @@ -86,6 +86,16 @@ public class Dataset implements Serializable, Externaliz } /** + * Creates new Dataset by given data. + * + * @param data Should be initialized with one vector at least. + */ + public Dataset(Row[] data) { + this.data = data; + this.rowSize = data.length; + } + + /** * Creates new Dataset and initialized with empty data structure. * * @param rowSize Amount of instances. Should be > 0. http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java index 0028a16..3239116 100644 --- a/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java +++ b/modules/ml/src/main/java/org/apache/ignite/ml/structures/LabeledDataset.java @@ -27,7 +27,7 @@ import org.apache.ignite.ml.math.impls.vector.SparseDistributedVector; /** * Class for set of labeled vectors. */ -public class LabeledDataset extends Dataset { +public class LabeledDataset extends Dataset implements AutoCloseable { /** * Default constructor (required by Externalizable). */ @@ -70,6 +70,15 @@ public class LabeledDataset extends Dataset { initializeDataWithLabeledVectors(); } + /** + * Creates new Labeled Dataset by given data. + * + * @param data Should be initialized with one vector at least. + */ + public LabeledDataset(Row[] data) { + super(data); + } + /** */ private void initializeDataWithLabeledVectors() { data = (Row[])new LabeledVector[rowSize]; @@ -207,4 +216,9 @@ public class LabeledDataset extends Dataset { return res; } + + /** Closes LabeledDataset. 
*/ + @Override public void close() throws Exception { + + } } http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java index 6d93402..4fd2e0e 100644 --- a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java +++ b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationModel.java @@ -146,7 +146,9 @@ public class SVMLinearBinaryClassificationModel implements Model return true; if (o == null || getClass() != o.getClass()) return false; + SVMLinearBinaryClassificationModel mdl = (SVMLinearBinaryClassificationModel)o; + return Double.compare(mdl.intercept, intercept) == 0 && Double.compare(mdl.threshold, threshold) == 0 && Boolean.compare(mdl.isKeepingRawLabels, isKeepingRawLabels) == 0 @@ -176,7 +178,7 @@ public class SVMLinearBinaryClassificationModel implements Model return builder.toString(); } - return "LinearRegressionModel{" + + return "SVMModel{" + "weights=" + weights + ", intercept=" + intercept + '}'; http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java index ee3b6e8..e745ca9 100644 --- a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java +++ b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearBinaryClassificationTrainer.java @@ -18,10 +18,13 @@ package org.apache.ignite.ml.svm; import java.util.concurrent.ThreadLocalRandom; -import org.apache.ignite.ml.Trainer; +import org.apache.ignite.ml.DatasetTrainer; +import org.apache.ignite.ml.dataset.Dataset; +import org.apache.ignite.ml.dataset.DatasetBuilder; +import org.apache.ignite.ml.dataset.PartitionDataBuilder; import org.apache.ignite.ml.math.Vector; +import org.apache.ignite.ml.math.functions.IgniteBiFunction; import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; -import org.apache.ignite.ml.math.impls.vector.SparseDistributedVector; import org.apache.ignite.ml.structures.LabeledDataset; import org.apache.ignite.ml.structures.LabeledVector; import org.jetbrains.annotations.NotNull; @@ -32,33 +35,50 @@ import org.jetbrains.annotations.NotNull; * and +1 labels for two classes and makes binary classification.

The paper about this algorithm could be found * here https://arxiv.org/abs/1409.1458. */ -public class SVMLinearBinaryClassificationTrainer implements Trainer { +public class SVMLinearBinaryClassificationTrainer + implements DatasetTrainer { /** Amount of outer SDCA algorithm iterations. */ - private int amountOfIterations = 20; + private int amountOfIterations = 200; /** Amount of local SDCA algorithm iterations. */ - private int amountOfLocIterations = 50; + private int amountOfLocIterations = 100; /** Regularization parameter. */ - private double lambda = 0.2; + private double lambda = 0.4; - /** This flag enables distributed mode for this algorithm. */ - private boolean isDistributed; + /** Dataset. */ + private Dataset> dataset; /** - * Returns model based on data + * Trains model based on the specified data. * - * @param data data to build model - * @return model + * @param datasetBuilder Dataset builder. + * @param featureExtractor Feature extractor. + * @param lbExtractor Label extractor. + * @param cols Number of columns. + * @return Model. */ - @Override public SVMLinearBinaryClassificationModel train(LabeledDataset data) { - isDistributed = data.isDistributed(); + @Override public SVMLinearBinaryClassificationModel fit(DatasetBuilder datasetBuilder, IgniteBiFunction featureExtractor, IgniteBiFunction lbExtractor, int cols) { - final int weightVectorSizeWithIntercept = data.colSize() + 1; + assert datasetBuilder != null; + + PartitionDataBuilder> partDataBuilder = new SVMPartitionDataBuilderOnHeap<>( + featureExtractor, + lbExtractor, + cols + ); + + this.dataset = datasetBuilder.build( + (upstream, upstreamSize) -> new SVMPartitionContext(), + partDataBuilder + ); + + + final int weightVectorSizeWithIntercept = cols + 1; Vector weights = initializeWeightsWithZeros(weightVectorSizeWithIntercept); for (int i = 0; i < this.getAmountOfIterations(); i++) { - Vector deltaWeights = calculateUpdates(data, weights); + Vector deltaWeights = calculateUpdates(weights); weights = weights.plus(deltaWeights); // creates new vector } @@ -67,34 +87,32 @@ public class SVMLinearBinaryClassificationTrainer implements Trainer { + Vector copiedWeights = weights.copy(); + Vector deltaWeights = initializeWeightsWithZeros(weights.size()); + final int amountOfObservation = data.rowSize(); - Vector tmpAlphas = initializeWeightsWithZeros(amountOfObservation); - Vector deltaAlphas = initializeWeightsWithZeros(amountOfObservation); + Vector tmpAlphas = initializeWeightsWithZeros(amountOfObservation); + Vector deltaAlphas = initializeWeightsWithZeros(amountOfObservation); - for (int i = 0; i < this.getAmountOfLocIterations(); i++) { - int randomIdx = ThreadLocalRandom.current().nextInt(amountOfObservation); + for (int i = 0; i < this.getAmountOfLocIterations(); i++) { + int randomIdx = ThreadLocalRandom.current().nextInt(amountOfObservation); - Deltas deltas = getDeltas(data, copiedWeights, amountOfObservation, tmpAlphas, randomIdx); + Deltas deltas = getDeltas(data, copiedWeights, amountOfObservation, tmpAlphas, randomIdx); - copiedWeights = copiedWeights.plus(deltas.deltaWeights); // creates new vector - deltaWeights = deltaWeights.plus(deltas.deltaWeights); // creates new vector + copiedWeights = copiedWeights.plus(deltas.deltaWeights); // creates new vector + deltaWeights = deltaWeights.plus(deltas.deltaWeights); // creates new vector - tmpAlphas.set(randomIdx, tmpAlphas.get(randomIdx) + deltas.deltaAlpha); - deltaAlphas.set(randomIdx, deltaAlphas.get(randomIdx) + deltas.deltaAlpha); - } - return 
deltaWeights; + tmpAlphas.set(randomIdx, tmpAlphas.get(randomIdx) + deltas.deltaAlpha); + deltaAlphas.set(randomIdx, deltaAlphas.get(randomIdx) + deltas.deltaAlpha); + } + return deltaWeights; + }, (a, b) -> a == null ? b : a.plus(b)); } /** */ @@ -225,6 +243,7 @@ public class SVMLinearBinaryClassificationTrainer implements Trainer, Exportable, Serializable { - /** List of models associated with each class. */ - private Map models; - - /** */ - public SVMLinearMultiClassClassificationModel() { - this.models = new HashMap<>(); - } - - /** {@inheritDoc} */ - @Override public Double apply(Vector input) { - TreeMap maxMargins = new TreeMap<>(); - - models.forEach((k, v) -> maxMargins.put(input.dot(v.weights()) + v.intercept(), k)); - - return maxMargins.lastEntry().getValue(); - } - - /** {@inheritDoc} */ - @Override public

void saveModel(Exporter exporter, P path) { - exporter.save(this, path); - } - - /** {@inheritDoc} */ - @Override public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - SVMLinearMultiClassClassificationModel mdl = (SVMLinearMultiClassClassificationModel)o; - return Objects.equals(models, mdl.models); - } - - /** {@inheritDoc} */ - @Override public int hashCode() { - return Objects.hash(models); - } - - /** {@inheritDoc} */ - @Override public String toString() { - StringBuilder wholeStr = new StringBuilder(); - - models.forEach((clsLb, mdl) -> { - wholeStr.append("The class with label " + clsLb + " has classifier: " + mdl.toString() + System.lineSeparator()); - }); - - return wholeStr.toString(); - } - - /** - * Adds a specific SVM binary classifier to the bunch of same classifiers. - * - * @param clsLb The class label for the added model. - * @param mdl The model. - */ - public void add(double clsLb, SVMLinearBinaryClassificationModel mdl) { - models.put(clsLb, mdl); - } -} http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java deleted file mode 100644 index 669e2e3..0000000 --- a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMLinearMultiClassClassificationTrainer.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.ml.svm; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import org.apache.ignite.ml.Trainer; -import org.apache.ignite.ml.structures.LabeledDataset; - -/** - * Base class for a soft-margin SVM linear multiclass-classification trainer based on the communication-efficient - * distributed dual coordinate ascent algorithm (CoCoA) with hinge-loss function. - * - * All common parameters are shared with bunch of binary classification trainers. - */ -public class SVMLinearMultiClassClassificationTrainer implements Trainer { - /** Amount of outer SDCA algorithm iterations. */ - private int amountOfIterations = 20; - - /** Amount of local SDCA algorithm iterations. */ - private int amountOfLocIterations = 50; - - /** Regularization parameter. */ - private double lambda = 0.2; - - /** - * Returns model based on data. - * - * @param data data to build model. - * @return model. 
- */ - @Override public SVMLinearMultiClassClassificationModel train(LabeledDataset data) { - List classes = getClassLabels(data); - - SVMLinearMultiClassClassificationModel multiClsMdl = new SVMLinearMultiClassClassificationModel(); - - classes.forEach(clsLb -> { - LabeledDataset binarizedDataset = binarizeLabels(data, clsLb); - - SVMLinearBinaryClassificationTrainer trainer = new SVMLinearBinaryClassificationTrainer() - .withAmountOfIterations(this.amountOfIterations()) - .withAmountOfLocIterations(this.amountOfLocIterations()) - .withLambda(this.lambda()); - - multiClsMdl.add(clsLb, trainer.train(binarizedDataset)); - }); - - return multiClsMdl; - } - - /** - * Copies the given data and changes class labels in +1 for chosen class and in -1 for the rest classes. - * - * @param data Data to transform. - * @param clsLb Chosen class in schema One-vs-Rest. - * @return Copy of dataset with new labels. - */ - private LabeledDataset binarizeLabels(LabeledDataset data, double clsLb) { - final LabeledDataset ds = data.copy(); - - for (int i = 0; i < ds.rowSize(); i++) - ds.setLabel(i, ds.label(i) == clsLb ? 1.0 : -1.0); - - return ds; - } - - /** Iterates among dataset and collects class labels. */ - private List getClassLabels(LabeledDataset data) { - final Set clsLabels = new HashSet<>(); - - for (int i = 0; i < data.rowSize(); i++) - clsLabels.add(data.label(i)); - - List res = new ArrayList<>(); - res.addAll(clsLabels); - - return res; - } - - /** - * Set up the regularization parameter. - * - * @param lambda The regularization parameter. Should be more than 0.0. - * @return Trainer with new lambda parameter value. - */ - public SVMLinearMultiClassClassificationTrainer withLambda(double lambda) { - assert lambda > 0.0; - this.lambda = lambda; - return this; - } - - /** - * Gets the regularization lambda. - * - * @return The parameter value. - */ - public double lambda() { - return lambda; - } - - /** - * Gets the amount of outer iterations of SCDA algorithm. - * - * @return The parameter value. - */ - public int amountOfIterations() { - return amountOfIterations; - } - - /** - * Set up the amount of outer iterations of SCDA algorithm. - * - * @param amountOfIterations The parameter value. - * @return Trainer with new amountOfIterations parameter value. - */ - public SVMLinearMultiClassClassificationTrainer withAmountOfIterations(int amountOfIterations) { - this.amountOfIterations = amountOfIterations; - return this; - } - - /** - * Gets the amount of local iterations of SCDA algorithm. - * - * @return The parameter value. - */ - public int amountOfLocIterations() { - return amountOfLocIterations; - } - - /** - * Set up the amount of local iterations of SCDA algorithm. - * - * @param amountOfLocIterations The parameter value. - * @return Trainer with new amountOfLocIterations parameter value. 
- */ - public SVMLinearMultiClassClassificationTrainer withAmountOfLocIterations(int amountOfLocIterations) { - this.amountOfLocIterations = amountOfLocIterations; - return this; - } -} - - - http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java new file mode 100644 index 0000000..0aee0fb --- /dev/null +++ b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionContext.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.svm; + +import java.io.Serializable; + +/** + * Partition context of the SVM classification algorithm. + */ +public class SVMPartitionContext implements Serializable { + /** */ + private static final long serialVersionUID = -7212307112344430126L; +} http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java new file mode 100644 index 0000000..ad85758 --- /dev/null +++ b/modules/ml/src/main/java/org/apache/ignite/ml/svm/SVMPartitionDataBuilderOnHeap.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
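The new SVMPartitionContext introduced here is intentionally empty: it only gives the partitioned-dataset machinery a concrete Serializable context type with a fixed serialVersionUID. If per-partition state ever becomes necessary, the context is the place to keep it; the sketch below shows what such an extension could look like, with the field being purely hypothetical and not part of this commit.

import java.io.Serializable;

/** Hypothetical, extended variant of the (currently empty) SVM partition context; the field is illustrative only. */
public class SVMPartitionContextSketch implements Serializable {
    /** Fixed id so that nodes running slightly different builds can still deserialize the context. */
    private static final long serialVersionUID = 1L;

    /** Example of per-partition state a future version could cache between compute calls (hypothetical). */
    private double[] cachedDualVariables;

    /** */
    public double[] getCachedDualVariables() {
        return cachedDualVariables;
    }

    /** */
    public void setCachedDualVariables(double[] cachedDualVariables) {
        this.cachedDualVariables = cachedDualVariables;
    }
}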
+ */ + +package org.apache.ignite.ml.svm; + +import java.io.Serializable; +import java.util.Iterator; +import org.apache.ignite.ml.dataset.PartitionDataBuilder; +import org.apache.ignite.ml.dataset.UpstreamEntry; +import org.apache.ignite.ml.math.functions.IgniteBiFunction; +import org.apache.ignite.ml.structures.LabeledDataset; +import org.apache.ignite.ml.structures.LabeledVector; + +/** + * SVM partition data builder that builds {@link LabeledDataset}. + * + * @param Type of a key in upstream data. + * @param Type of a value in upstream data. + * @param Type of a partition context. + */ +public class SVMPartitionDataBuilderOnHeap + implements PartitionDataBuilder> { + /** */ + private static final long serialVersionUID = -7820760153954269227L; + + /** Extractor of X matrix row. */ + private final IgniteBiFunction xExtractor; + + /** Extractor of Y vector value. */ + private final IgniteBiFunction yExtractor; + + /** Number of columns. */ + private final int cols; + + /** + * Constructs a new instance of SVM partition data builder. + * + * @param xExtractor Extractor of X matrix row. + * @param yExtractor Extractor of Y vector value. + * @param cols Number of columns. + */ + public SVMPartitionDataBuilderOnHeap(IgniteBiFunction xExtractor, + IgniteBiFunction yExtractor, int cols) { + this.xExtractor = xExtractor; + this.yExtractor = yExtractor; + this.cols = cols; + } + + /** {@inheritDoc} */ + @Override public LabeledDataset build(Iterator> upstreamData, long upstreamDataSize, + C ctx) { + + double[][] x = new double[Math.toIntExact(upstreamDataSize)][cols]; + double[] y = new double[Math.toIntExact(upstreamDataSize)]; + + int ptr = 0; + while (upstreamData.hasNext()) { + UpstreamEntry entry = upstreamData.next(); + double[] row = xExtractor.apply(entry.getKey(), entry.getValue()); + + assert row.length == cols : "X extractor must return exactly " + cols + " columns"; + + x[ptr] = row; + + y[ptr] = yExtractor.apply(entry.getKey(), entry.getValue()); + + ptr++; + } + + return new LabeledDataset<>(x, y); + } +} http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java index 57d93d6..421a19f 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/LocalModelsTest.java @@ -32,7 +32,6 @@ import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; import org.apache.ignite.ml.regressions.linear.LinearRegressionModel; import org.apache.ignite.ml.structures.LabeledDataset; import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel; -import org.apache.ignite.ml.svm.SVMLinearMultiClassClassificationModel; import org.junit.Assert; import org.junit.Test; @@ -98,33 +97,6 @@ public class LocalModelsTest { /** */ - @Test - public void importExportSVMMulticlassClassificationModelTest() throws IOException { - executeModelTest(mdlFilePath -> { - - - SVMLinearBinaryClassificationModel binaryMdl1 = new SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[]{1, 2}), 3); - SVMLinearBinaryClassificationModel binaryMdl2 = new SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[]{2, 3}), 4); - SVMLinearBinaryClassificationModel binaryMdl3 = new SVMLinearBinaryClassificationModel(new 
DenseLocalOnHeapVector(new double[]{3, 4}), 5); - - SVMLinearMultiClassClassificationModel mdl = new SVMLinearMultiClassClassificationModel(); - mdl.add(1, binaryMdl1); - mdl.add(2, binaryMdl2); - mdl.add(3, binaryMdl3); - - Exporter exporter = new FileExporter<>(); - mdl.saveModel(exporter, mdlFilePath); - - SVMLinearMultiClassClassificationModel load = exporter.load(mdlFilePath); - - Assert.assertNotNull(load); - Assert.assertEquals("", mdl, load); - - return null; - }); - } - - /** */ private void executeModelTest(Function code) throws IOException { Path mdlPath = Files.createTempFile(null, null); http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java deleted file mode 100644 index 424118d..0000000 --- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/BaseSVMTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.ml.svm; - -import org.apache.ignite.Ignite; -import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; - -/** - * Base class for SVM tests. - */ -public class BaseSVMTest extends GridCommonAbstractTest { - /** Count of nodes. */ - private static final int NODE_COUNT = 4; - - /** Grid instance. */ - protected Ignite ignite; - - /** - * Default constructor. - */ - public BaseSVMTest() { - super(false); - } - - /** - * {@inheritDoc} - */ - @Override protected void beforeTest() throws Exception { - ignite = grid(NODE_COUNT); - } - - /** {@inheritDoc} */ - @Override protected void beforeTestsStarted() throws Exception { - for (int i = 1; i <= NODE_COUNT; i++) - startGrid(i); - } - - /** {@inheritDoc} */ - @Override protected void afterTestsStopped() throws Exception { - stopAllGrids(); - } - -} http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java new file mode 100644 index 0000000..353915c --- /dev/null +++ b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMBinaryTrainerTest.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
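SVMPartitionDataBuilderOnHeap.build() above materializes one partition as a dense X matrix plus a Y vector: it walks the upstream key-value entries once, applies the feature extractor and the label extractor to each entry, and checks that every extracted row has exactly cols columns. A simplified standalone analogue of that conversion, with java.util.function.BiFunction standing in for IgniteBiFunction:

import java.util.Map;
import java.util.function.BiFunction;

/** Simplified analogue of SVMPartitionDataBuilderOnHeap.build(): turns (key, value) entries into X and Y arrays. */
final class PartitionArraysSketch {
    /** Feature matrix. */
    final double[][] x;

    /** Label vector. */
    final double[] y;

    /** */
    private PartitionArraysSketch(double[][] x, double[] y) {
        this.x = x;
        this.y = y;
    }

    /** Builds the arrays from upstream data using the given extractors. */
    static <K, V> PartitionArraysSketch build(Map<K, V> upstream, int cols,
        BiFunction<K, V, double[]> xExtractor, BiFunction<K, V, Double> yExtractor) {
        double[][] x = new double[upstream.size()][cols];
        double[] y = new double[upstream.size()];

        int ptr = 0;
        for (Map.Entry<K, V> e : upstream.entrySet()) {
            double[] row = xExtractor.apply(e.getKey(), e.getValue());

            assert row.length == cols : "X extractor must return exactly " + cols + " columns";

            x[ptr] = row;
            y[ptr] = yExtractor.apply(e.getKey(), e.getValue());
            ptr++;
        }

        return new PartitionArraysSketch(x, y);
    }
}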
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.ml.svm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ThreadLocalRandom; +import org.apache.ignite.ml.TestUtils; +import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder; +import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; +import org.junit.Test; + +/** + * Tests for {@link SVMLinearBinaryClassificationTrainer}. + */ +public class SVMBinaryTrainerTest { + /** Fixed size of Dataset. */ + private static final int AMOUNT_OF_OBSERVATIONS = 1000; + + /** Fixed size of columns in Dataset. */ + private static final int AMOUNT_OF_FEATURES = 2; + + /** + * Test trainer on classification model y = x. + */ + @Test + public void testTrainWithTheLinearlySeparableCase() { + Map data = new HashMap<>(); + + + ThreadLocalRandom rndX = ThreadLocalRandom.current(); + ThreadLocalRandom rndY = ThreadLocalRandom.current(); + + for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) { + double x = rndX.nextDouble(-1000, 1000); + double y = rndY.nextDouble(-1000, 1000); + double[] vec = new double[AMOUNT_OF_FEATURES + 1]; + vec[0] = y - x > 0 ? 1 : -1; // assign label. + vec[1] = x; + vec[2] = y; + data.put(i, vec); + } + + + SVMLinearBinaryClassificationTrainer trainer = new SVMLinearBinaryClassificationTrainer<>(); + + SVMLinearBinaryClassificationModel mdl = trainer.fit( + new LocalDatasetBuilder<>(data, 10), + (k, v) -> Arrays.copyOfRange(v, 1, v.length), + (k, v) -> v[0], + AMOUNT_OF_FEATURES); + + double precision = 1e-2; + + TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new double[]{100, 10})), precision); + TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new double[]{10, 100})), precision); + } +} http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java index 35b6644..2533466 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMModelTest.java @@ -59,21 +59,6 @@ public class SVMModelTest { /** */ @Test - public void testPredictWithMultiClasses() { - Vector weights1 = new DenseLocalOnHeapVector(new double[]{10.0, 0.0}); - Vector weights2 = new DenseLocalOnHeapVector(new double[]{0.0, 10.0}); - Vector weights3 = new DenseLocalOnHeapVector(new double[]{-1.0, -1.0}); - SVMLinearMultiClassClassificationModel mdl = new SVMLinearMultiClassClassificationModel(); - mdl.add(1, new SVMLinearBinaryClassificationModel(weights1, 0.0).withRawLabels(true)); - mdl.add(2, new SVMLinearBinaryClassificationModel(weights2, 0.0).withRawLabels(true)); - mdl.add(2, new SVMLinearBinaryClassificationModel(weights3, 
0.0).withRawLabels(true)); - - Vector observation = new DenseLocalOnHeapVector(new double[]{1.0, 1.0}); - TestUtils.assertEquals( 1.0, mdl.apply(observation), PRECISION); - } - - /** */ - @Test public void testPredictWithErasedLabels() { Vector weights = new DenseLocalOnHeapVector(new double[]{1.0, 1.0}); SVMLinearBinaryClassificationModel mdl = new SVMLinearBinaryClassificationModel(weights, 1.0); http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java index 853a43f..dd87fec 100644 --- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java +++ b/modules/ml/src/test/java/org/apache/ignite/ml/svm/SVMTestSuite.java @@ -17,23 +17,16 @@ package org.apache.ignite.ml.svm; -import org.apache.ignite.ml.svm.binary.DistributedLinearSVMBinaryClassificationTrainerTest; -import org.apache.ignite.ml.svm.binary.LocalLinearSVMBinaryClassificationTrainerTest; -import org.apache.ignite.ml.svm.multi.DistributedLinearSVMMultiClassClassificationTrainerTest; -import org.apache.ignite.ml.svm.multi.LocalLinearSVMMultiClassClassificationTrainerTest; import org.junit.runner.RunWith; import org.junit.runners.Suite; /** - * Test suite for all tests located in org.apache.ignite.ml.regressions.* package. + * Test suite for all tests located in org.apache.ignite.ml.svm.* package. */ @RunWith(Suite.class) @Suite.SuiteClasses({ - LocalLinearSVMBinaryClassificationTrainerTest.class, - DistributedLinearSVMBinaryClassificationTrainerTest.class, - LocalLinearSVMMultiClassClassificationTrainerTest.class, - DistributedLinearSVMMultiClassClassificationTrainerTest.class, - SVMModelTest.class + SVMModelTest.class, + SVMBinaryTrainerTest.class }) public class SVMTestSuite { // No-op. http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java deleted file mode 100644 index 1be1d1c..0000000 --- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/DistributedLinearSVMBinaryClassificationTrainerTest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
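The binary model exercised by these tests is a linear classifier over the feature vector: it computes a weighted sum of the features plus a constant term and either returns that raw value (withRawLabels(true), as in the removed multiclass prediction test above) or collapses it to the -1/+1 class labels, which is what testPredictWithErasedLabels and the trainer tests check. The sketch below captures that decision function; the exact thresholding convention of the Ignite model class is an assumption here, not a quote of its source.

/** Minimal sketch of a linear SVM decision function; the behaviour at a raw score of exactly 0 is assumed. */
static double decide(double[] weights, double intercept, double[] x, boolean rawLabels) {
    double raw = intercept;

    for (int i = 0; i < weights.length; i++)
        raw += weights[i] * x[i];

    if (rawLabels)
        return raw; // Unthresholded confidence, useful when several binary models are compared.

    return raw > 0 ? 1.0 : -1.0; // Erased labels: only the predicted class survives.
}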
- */ - -package org.apache.ignite.ml.svm.binary; - -import org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix; -import org.apache.ignite.ml.regressions.linear.LinearRegressionSGDTrainer; -import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationTrainer; - -/** - * Tests for {@link LinearRegressionSGDTrainer} on {@link DenseLocalOnHeapMatrix}. - */ -public class DistributedLinearSVMBinaryClassificationTrainerTest extends GenericLinearSVMBinaryClassificationTrainerTest { - /** */ - public DistributedLinearSVMBinaryClassificationTrainerTest() { - super( - new SVMLinearBinaryClassificationTrainer(), - true, - 1e-2); - } -} http://git-wip-us.apache.org/repos/asf/ignite/blob/318ffe50/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java ---------------------------------------------------------------------- diff --git a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java b/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java deleted file mode 100644 index f390557..0000000 --- a/modules/ml/src/test/java/org/apache/ignite/ml/svm/binary/GenericLinearSVMBinaryClassificationTrainerTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ignite.ml.svm.binary; - -import java.util.concurrent.ThreadLocalRandom; -import org.apache.ignite.internal.util.IgniteUtils; -import org.apache.ignite.ml.TestUtils; -import org.apache.ignite.ml.Trainer; -import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector; -import org.apache.ignite.ml.structures.LabeledDataset; -import org.apache.ignite.ml.structures.LabeledVector; -import org.apache.ignite.ml.svm.BaseSVMTest; -import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel; -import org.junit.Test; - -/** - * Base class for all linear regression trainers. - */ -public class GenericLinearSVMBinaryClassificationTrainerTest extends BaseSVMTest { - /** Fixed size of Dataset. */ - private static final int AMOUNT_OF_OBSERVATIONS = 100; - - /** Fixed size of columns in Dataset. */ - private static final int AMOUNT_OF_FEATURES = 2; - - /** */ - private final Trainer trainer; - - /** */ - private boolean isDistributed; - - /** */ - private final double precision; - - /** */ - GenericLinearSVMBinaryClassificationTrainerTest( - Trainer trainer, - boolean isDistributed, - double precision) { - super(); - this.trainer = trainer; - this.precision = precision; - this.isDistributed = isDistributed; - } - - /** - * Test trainer on classification model y = x. 
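Both the removed grid-based tests and the new SVMBinaryTrainerTest validate the trainer on the same synthetic problem: points drawn uniformly from [-1000, 1000) in each coordinate and labeled by the separating line y = x (+1 above the line, -1 below). A condensed standalone generator for that dataset, mirroring the loops in the tests and using the {label, x, y} row layout of the new test:

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;

/** Generates n points labeled by the line y = x; each row is {label, x, y}, as in SVMBinaryTrainerTest. */
final class SeparableDataSketch {
    /** */
    static Map<Integer, double[]> generate(int n) {
        Map<Integer, double[]> data = new HashMap<>();
        ThreadLocalRandom rnd = ThreadLocalRandom.current();

        for (int i = 0; i < n; i++) {
            double x = rnd.nextDouble(-1000, 1000);
            double y = rnd.nextDouble(-1000, 1000);

            data.put(i, new double[] {y - x > 0 ? 1 : -1, x, y});
        }

        return data;
    }
}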
- */ - @Test - public void testTrainWithTheLinearlySeparableCase() { - if (isDistributed) - IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); - - LabeledDataset dataset = new LabeledDataset(AMOUNT_OF_OBSERVATIONS, AMOUNT_OF_FEATURES, isDistributed); - - ThreadLocalRandom rndX = ThreadLocalRandom.current(); - ThreadLocalRandom rndY = ThreadLocalRandom.current(); - for (int i = 0; i < AMOUNT_OF_OBSERVATIONS; i++) { - double x = rndX.nextDouble(-1000, 1000); - double y = rndY.nextDouble(-1000, 1000); - dataset.features(i).set(0, x); - dataset.features(i).set(1, y); - double lb = y - x > 0 ? 1 : -1; - dataset.setLabel(i, lb); - } - - SVMLinearBinaryClassificationModel mdl = trainer.train(dataset); - - TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new double[] {100, 10})), precision); - TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new double[] {10, 100})), precision); - } - - /** - * Test trainer on classification model y = x. Amount of generated points is increased 10 times. - */ - @Test - public void testTrainWithTheLinearlySeparableCase10() { - if (isDistributed) - IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); - - LabeledDataset dataset = new LabeledDataset(AMOUNT_OF_OBSERVATIONS * 10, AMOUNT_OF_FEATURES, isDistributed); - - ThreadLocalRandom rndX = ThreadLocalRandom.current(); - ThreadLocalRandom rndY = ThreadLocalRandom.current(); - for (int i = 0; i < AMOUNT_OF_OBSERVATIONS * 10; i++) { - double x = rndX.nextDouble(-1000, 1000); - double y = rndY.nextDouble(-1000, 1000); - dataset.features(i).set(0, x); - dataset.features(i).set(1, y); - double lb = y - x > 0 ? 1 : -1; - dataset.setLabel(i, lb); - } - - SVMLinearBinaryClassificationModel mdl = trainer.train(dataset); - - TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new double[] {100, 10})), precision); - TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new double[] {10, 100})), precision); - } - - /** - * Test trainer on classification model y = x. Amount of generated points is increased 100 times. - */ - @Test - public void testTrainWithTheLinearlySeparableCase100() { - if (isDistributed) - IgniteUtils.setCurrentIgniteName(ignite.configuration().getIgniteInstanceName()); - - LabeledDataset dataset = new LabeledDataset(AMOUNT_OF_OBSERVATIONS * 100, AMOUNT_OF_FEATURES, isDistributed); - - ThreadLocalRandom rndX = ThreadLocalRandom.current(); - ThreadLocalRandom rndY = ThreadLocalRandom.current(); - for (int i = 0; i < AMOUNT_OF_OBSERVATIONS * 100; i++) { - double x = rndX.nextDouble(-1000, 1000); - double y = rndY.nextDouble(-1000, 1000); - dataset.features(i).set(0, x); - dataset.features(i).set(1, y); - double lb = y - x > 0 ? 1 : -1; - dataset.setLabel(i, lb); - } - - SVMLinearBinaryClassificationModel mdl = trainer.train(dataset); - - TestUtils.assertEquals(-1, mdl.apply(new DenseLocalOnHeapVector(new double[] {100, 10})), precision); - TestUtils.assertEquals(1, mdl.apply(new DenseLocalOnHeapVector(new double[] {10, 100})), precision); - } -}
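Taken together, the new API introduced by this commit is exercised end to end in SVMBinaryTrainerTest above: wrap the generated map in a LocalDatasetBuilder, fit the binary trainer with a feature extractor and a label extractor, and query the resulting model. The condensed sketch below mirrors that test; the trainer's generic parameters are elided by the mail archiver, so the <Integer, double[]> arguments are an assumption.

import java.util.Arrays;
import java.util.Map;
import org.apache.ignite.ml.dataset.impl.local.LocalDatasetBuilder;
import org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector;
import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationModel;
import org.apache.ignite.ml.svm.SVMLinearBinaryClassificationTrainer;

/** Condensed usage of the partitioned-dataset based trainer, mirroring SVMBinaryTrainerTest. */
final class BinarySvmUsageSketch {
    /** Fits the trainer on a map of {label, x, y} rows and returns the model. */
    static SVMLinearBinaryClassificationModel fit(Map<Integer, double[]> data, int featureCnt) {
        // Type parameters are assumed: the mail strips generics from the test's declaration.
        SVMLinearBinaryClassificationTrainer<Integer, double[]> trainer =
            new SVMLinearBinaryClassificationTrainer<>();

        SVMLinearBinaryClassificationModel mdl = trainer.fit(
            new LocalDatasetBuilder<>(data, 10),          // 10 local partitions, as in the test.
            (k, v) -> Arrays.copyOfRange(v, 1, v.length), // Features: columns 1..n of the row.
            (k, v) -> v[0],                               // Label: column 0 of the row.
            featureCnt);

        // A point far above the line y = x should score higher than one far below it.
        double above = mdl.apply(new DenseLocalOnHeapVector(new double[] {10, 100}));
        double below = mdl.apply(new DenseLocalOnHeapVector(new double[] {100, 10}));
        assert above >= below;

        return mdl;
    }
}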