mahout-commits mailing list archives

From vans...@apache.org
Subject [12/52] [partial] mahout git commit: removed all files except for website directory
Date Tue, 27 Jun 2017 16:14:37 GMT
http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionSuiteBase.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionSuiteBase.scala b/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionSuiteBase.scala
deleted file mode 100644
index 8910ae9..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionSuiteBase.scala
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.algorithms
-
-import org.apache.mahout.math.algorithms.regression._
-import org.apache.mahout.math.drm._
-import org.apache.mahout.math.drm.RLikeDrmOps._
-import org.apache.mahout.math.scalabindings._
-import org.apache.mahout.math.scalabindings.RLikeOps._
-import org.apache.mahout.test.DistributedMahoutSuite
-import org.scalatest.{FunSuite, Matchers}
-
-trait RegressionSuiteBase extends DistributedMahoutSuite with Matchers {
-  this: FunSuite =>
-
-  val epsilon = 1E-6
-
-  test("ordinary least squares") {
-    /*
-    R Prototype:
-    dataM <- matrix( c(2, 2, 10.5, 10, 29.509541,
-      1, 2, 12,   12, 18.042851,
-      1, 1, 12,   13, 22.736446,
-      2, 1, 11,   13, 32.207582,
-      1, 2, 12,   11, 21.871292,
-      2, 1, 16,   8,  36.187559,
-      6, 2, 17,   1,  50.764999,
-      3, 2, 13,   7,  40.400208,
-      3, 3, 13,   4,  45.811716), nrow=9, ncol=5, byrow=TRUE)
-
-
-    X = dataM[, c(1,2,3,4)]
-    y = dataM[, c(5)]
-
-    model <- lm(y ~ X )
-    summary(model)
-
-     */
-
-    val drmData = drmParallelize(dense(
-      (2, 2, 10.5, 10, 29.509541),  // Apple Cinnamon Cheerios
-      (1, 2, 12,   12, 18.042851),  // Cap'n'Crunch
-      (1, 1, 12,   13, 22.736446),  // Cocoa Puffs
-      (2, 1, 11,   13, 32.207582),  // Froot Loops
-      (1, 2, 12,   11, 21.871292),  // Honey Graham Ohs
-      (2, 1, 16,   8,  36.187559),  // Wheaties Honey Gold
-      (6, 2, 17,   1,  50.764999),  // Cheerios
-      (3, 2, 13,   7,  40.400208),  // Clusters
-      (3, 3, 13,   4,  45.811716)), numPartitions = 2)
-
-
-    val drmX = drmData(::, 0 until 4)
-    val drmY = drmData(::, 4 until 5)
-
-    val model = new OrdinaryLeastSquares[Int]().fit(drmX, drmY, 'calcCommonStatistics → false)
-
-    val estimate = model.beta
-    val Ranswers = dvec(-1.336265, -13.157702, -4.152654, -5.679908, 163.179329)
-
-    val epsilon = 1E-6
-    (estimate - Ranswers).sum should be < epsilon
-
-    // TODO add test for S.E / pvalue
-  }
-
-  test("cochrane-orcutt"){
-    /* R Prototype:
-    library(orcutt)
-
-    df = data.frame(t(data.frame(
-        c(20.96,  127.3),
-        c(21.40,  130.0),
-        c(21.96,  132.7),
-        c(21.52,  129.4),
-        c(22.39,  135.0),
-        c(22.76,  137.1),
-        c(23.48,  141.2),
-        c(23.66,  142.8),
-        c(24.10,  145.5),
-        c(24.01,  145.3),
-        c(24.54,  148.3),
-        c(24.30,  146.4),
-        c(25.00,  150.2),
-        c(25.64,  153.1),
-        c(26.36,  157.3),
-        c(26.98,  160.7),
-        c(27.52,  164.2),
-        c(27.78,  165.6),
-        c(28.24,  168.7),
-        c(28.78,  171.7))))
-
-    rownames(df) <- NULL
-    colnames(df) <- c("y", "x")
-    my_lm = lm(y ~ x, data=df)
-    coch = cochrane.orcutt(my_lm)
-
-    ///////////////////////////////////////
-    The R-implementation is kind of...silly.
-
-    The above works - it converges after 318 iterations - and the transformed DW is 1.72, yet the rho is
-     .95882.   After 318 iterations, this will also report a rho of .95882 (which suggests SEVERE
-     autocorrelation - nothing close to 1.72).
-
-     At any rate, the real prototype for this is the example from Applied Linear Statistical Models,
-     5th Edition, by Kutner, Nachtsheim, Neter, and Li.  They also provide some interesting notes on p 494:
-     1) "Cochrane-Orcutt does not always work properly.  A major reason is that when the error terms
-     are positively autocorrelated, the estimate r in (12.22) tends to underestimate the autocorrelation
-     parameter rho.  When this bias is serious, it can significantly reduce the effectiveness of the
-     Cochrane-Orcutt approach.
-     2) There exists an approximate relation between the Durbin-Watson test statistic D in (12.14)
-     and the estimated autocorrelation parameter r in (12.22):
-     D ~= 2(1 - r)"
-
-     They also note on p 492:
-     "... If the process does not terminate after one or two iterations, a different procedure
-     should be employed."
-     This differs from the logic found elsewhere, and from the method presented in R, where the simple
-     example in the prototype runs for 318 iterations. This is why the default maximum number of
-     iterations is 3, and it should be left as such.
-
-     Also, the prototype and 'correct answers' are based on the example presented in Kutner et al. on
-     pp. 492-4 (including the dataset). A short numeric sketch below illustrates the D ~= 2(1 - r) relation.
-
-     */
-
-    val alsmBlaisdellCo = drmParallelize( dense(
-      (20.96,  127.3),
-      (21.40,  130.0),
-      (21.96,  132.7),
-      (21.52,  129.4),
-      (22.39,  135.0),
-      (22.76,  137.1),
-      (23.48,  141.2),
-      (23.66,  142.8),
-      (24.10,  145.5),
-      (24.01,  145.3),
-      (24.54,  148.3),
-      (24.30,  146.4),
-      (25.00,  150.2),
-      (25.64,  153.1),
-      (26.36,  157.3),
-      (26.98,  160.7),
-      (27.52,  164.2),
-      (27.78,  165.6),
-      (28.24,  168.7),
-      (28.78,  171.7) ))
-
-    val drmY = alsmBlaisdellCo(::, 0 until 1)
-    val drmX = alsmBlaisdellCo(::, 1 until 2)
-
-    val coModel = new CochraneOrcutt[Int]().fit(drmX, drmY, ('iterations -> 2))
-    val coResiduals = drmY - coModel.predict(drmX)
-
-    val correctRho = 0.631166
-    (coModel.rhos(1) - correctRho) should be < epsilon
-
-    val shortEpsilon = 1E-4 // book rounded off pretty short
-    val correctBeta = dvec(0.17376, -1.0685)
-    (coModel.betas(1) - correctBeta).sum.abs should be < shortEpsilon
-
-    val correctSe = dvec(0.002957, 0.45332)
-    (coModel.se - correctSe).sum.abs should be < shortEpsilon
-  }
-
-}
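
The comment above leans on the approximation D ~= 2(1 - r) between the Durbin-Watson statistic D and the estimated autocorrelation r. A minimal plain-Scala sketch (no Mahout dependencies; the rho values are the ones quoted in the comment, only the arithmetic is new) of what that approximation implies:

    object DwRhoSketch extends App {
      // Durbin-Watson statistic implied by an estimated autocorrelation r, per D ~= 2(1 - r).
      def dwFromRho(r: Double): Double = 2.0 * (1.0 - r)

      val rhoFromR    = 0.95882   // rho reported by R's cochrane.orcutt in the prototype above
      val rhoFromBook = 0.631166  // rho used as the correct answer in the test (Kutner et al.)

      println(f"rho = $rhoFromR%.5f   =>  D ~= ${dwFromRho(rhoFromR)}%.5f (near 0: severe positive autocorrelation)")
      println(f"rho = $rhoFromBook%.6f  =>  D ~= ${dwFromRho(rhoFromBook)}%.6f")
    }

With rho = .95882 the implied D is about 0.08, nowhere near the transformed DW of 1.72 that the R run reports, which is exactly the inconsistency the comment points out.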

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionTestsSuiteBase.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionTestsSuiteBase.scala b/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionTestsSuiteBase.scala
deleted file mode 100644
index 57dffef..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/algorithms/RegressionTestsSuiteBase.scala
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
-  * Licensed to the Apache Software Foundation (ASF) under one
-  * or more contributor license agreements. See the NOTICE file
-  * distributed with this work for additional information
-  * regarding copyright ownership. The ASF licenses this file
-  * to you under the Apache License, Version 2.0 (the
-  * "License"); you may not use this file except in compliance
-  * with the License. You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing,
-  * software distributed under the License is distributed on an
-  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  * KIND, either express or implied. See the License for the
-  * specific language governing permissions and limitations
-  * under the License.
-  */
-
-package org.apache.mahout.math.algorithms
-
-import org.apache.mahout.math.algorithms.regression.OrdinaryLeastSquares
-import org.apache.mahout.math.algorithms.regression.tests._
-import org.apache.mahout.math.drm.{CheckpointedDrm, drmParallelize}
-import org.apache.mahout.math.drm.RLikeDrmOps._
-import org.apache.mahout.math.scalabindings.{`::`, dense}
-import org.apache.mahout.test.DistributedMahoutSuite
-import org.scalatest.{FunSuite, Matchers}
-
-
-trait RegressionTestsSuiteBase extends DistributedMahoutSuite with Matchers {
-  this: FunSuite =>
-
-  val epsilon = 1E-4
-
-  test("fittness tests") {
-    /*
-    R Prototype:
-    dataM <- matrix( c(2, 2, 10.5, 10, 29.509541,
-      1, 2, 12,   12, 18.042851,
-      1, 1, 12,   13, 22.736446,
-      2, 1, 11,   13, 32.207582,
-      1, 2, 12,   11, 21.871292,
-      2, 1, 16,   8,  36.187559,
-      6, 2, 17,   1,  50.764999,
-      3, 2, 13,   7,  40.400208,
-      3, 3, 13,   4,  45.811716), nrow=9, ncol=5, byrow=TRUE)
-
-
-    X = dataM[, c(1,2,3,4)]
-    y = dataM[, c(5)]
-
-    model <- lm(y ~ X)
-    summary(model)
-
-     */
-
-    val drmData = drmParallelize(dense(
-      (2, 2, 10.5, 10, 29.509541),  // Apple Cinnamon Cheerios
-      (1, 2, 12,   12, 18.042851),  // Cap'n'Crunch
-      (1, 1, 12,   13, 22.736446),  // Cocoa Puffs
-      (2, 1, 11,   13, 32.207582),  // Froot Loops
-      (1, 2, 12,   11, 21.871292),  // Honey Graham Ohs
-      (2, 1, 16,   8,  36.187559),  // Wheaties Honey Gold
-      (6, 2, 17,   1,  50.764999),  // Cheerios
-      (3, 2, 13,   7,  40.400208),  // Clusters
-      (3, 3, 13,   4,  45.811716)), numPartitions = 2)
-
-    val drmX = drmData(::, 0 until 4)
-    val drmY = drmData(::, 4 until 5)
-
-    val model = new OrdinaryLeastSquares[Int]().fit(drmX, drmY)
-
-    println(model.summary)
-    // Answers from running similar algorithm in R
-    val rR2 = 0.9425
-    val rMSE = 6.457157
-
-    val r2: Double = model.r2
-    val mse: Double = model.mse
-    (rR2 - r2) should be < epsilon
-    (rMSE - mse) should be < epsilon
-
-    Math.abs(model.beta.get(4) - 163.17933  ) should be < epsilon
-    Math.abs(model.beta.get(0) - (-1.33627) ) should be < epsilon
-    Math.abs(model.beta.get(1) - (-13.15770)) should be < epsilon
-    Math.abs(model.beta.get(2) - (-4.15265) ) should be < epsilon
-    Math.abs(model.beta.get(3) - (-5.679908)) should be < epsilon
-
-    Math.abs(model.tScore.get(0) - (-0.49715717)) should be < epsilon
-    Math.abs(model.tScore.get(1) - (-2.43932888)) should be < epsilon
-    Math.abs(model.tScore.get(2) - (-2.32654000)) should be < epsilon
-    Math.abs(model.tScore.get(3) - (-3.01022444)) should be < epsilon
-    Math.abs(model.tScore.get(4) -  3.143183937 ) should be < epsilon
-
-    model.degreesOfFreedom should equal(5)
-    model.trainingExamples should equal(9)
-
-    Math.abs(model.fScore - 16.38542361) should be < 0.0000001
-
-  }
-
-  test("durbinWatsonTest test") {
-    /**
-      * R Prototype
-      *
-      * library(car)
-      * residuals <- seq(0, 4.9, 0.1)
-      * ## perform Durbin-Watson test
-      * durbinWatsonTest(residuals)
-      */
-
-    val correctAnswer = 0.001212121
-    val err1 =  drmParallelize( dense((0.0 until 5.0 by 0.1).toArray) ).t
-    val drmX = drmParallelize( dense((0 until 50).toArray.map( t => Math.pow(-1.0, t)) ) ).t
-    val drmY = drmX + err1 + 1
-    var model = new OrdinaryLeastSquares[Int]().fit(drmX, drmY)
-    val syntheticResiduals = err1
-    model = AutocorrelationTests.DurbinWatson(model, syntheticResiduals)
-    val myAnswer: Double = model.testResults.getOrElse('durbinWatsonTestStatistic, -1.0).asInstanceOf[Double]
-    (myAnswer - correctAnswer) should be < epsilon
-  }
-
-
-}
-
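
For reference, the Durbin-Watson statistic checked in the last test above is D = sum_{t=2..n} (e_t - e_{t-1})^2 / sum_{t=1..n} e_t^2. A small plain-Scala sketch (independent of the Mahout/DRM plumbing) that reproduces the R value quoted as correctAnswer:

    object DurbinWatsonSketch extends App {
      // D = sum over t of (e_t - e_{t-1})^2, divided by the sum of e_t^2.
      def durbinWatson(e: Seq[Double]): Double = {
        val num = e.sliding(2).map(w => (w(1) - w(0)) * (w(1) - w(0))).sum
        val den = e.map(x => x * x).sum
        num / den
      }

      // Same residuals as the R prototype: seq(0, 4.9, 0.1)
      val residuals = (0 until 50).map(_ * 0.1)
      println(durbinWatson(residuals))  // ~0.001212121, matching the test's correctAnswer
    }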

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/backend/BackendSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/backend/BackendSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/backend/BackendSuite.scala
deleted file mode 100644
index ba6e145..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/backend/BackendSuite.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package org.apache.mahout.math.backend
-
-import org.apache.mahout.math.backend.jvm.JvmBackend
-import org.scalatest.{FunSuite, Matchers}
-
-import scala.collection.mutable
-import scala.reflect.{ClassTag, classTag}
-
-class BackendSuite extends FunSuite with Matchers {
-
-  test("GenericBackend") {
-
-    trait MySolverTrait1 { def myMethod1: Unit = () }
-
-
-    trait MySolverTrait2
-
-    class MySolverImpl1 extends MySolverTrait1 {
-    }
-
-    class MySolverImpl2 extends MySolverTrait2
-
-    // A dummy backend supporting two solver traits, filled with dummy implementations of those
-    // traits, should be able to serve solvers based on the trait requested.
-    val myBackend = new Backend {
-
-      override def isAvailable: Boolean = true
-
-      override val solverMap = new mutable.HashMap[ClassTag[_], Any]()
-
-      solverMap ++= Map(
-        classTag[MySolverTrait1] → new MySolverImpl1,
-        classTag[MySolverTrait2] → new MySolverImpl2
-      )
-
-      validateMap()
-    }
-
-    myBackend.getSolver shouldBe None
-
-    val mySolver1 = myBackend.getSolver[MySolverTrait1]
-
-    // This is indeed solver1 trait type:
-    mySolver1.get.myMethod1
-    mySolver1.get.isInstanceOf[MySolverImpl1] shouldBe true
-
-    // Validator should not allow non-subclasses in implementation.
-    an [IllegalArgumentException] mustBe thrownBy {
-      myBackend.solverMap(classTag[MySolverTrait2]) = 0
-      myBackend.validateMap()
-    }
-  }
-
-  test("JvmBackend") {
-    // Just create JVM backend and validate.
-    JvmBackend.validateMap()
-  }
-
-}
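
The dummy backend above exercises the contract under test: solver implementations are registered in a ClassTag-keyed map and looked up by the solver trait requested. A stripped-down sketch of that lookup mechanism in plain Scala (illustrative names only; this is not the Mahout Backend API itself):

    import scala.collection.mutable
    import scala.reflect.{ClassTag, classTag}

    object SolverRegistrySketch extends App {
      trait GreetingSolver { def greet(): String }
      class EnglishGreeting extends GreetingSolver { def greet() = "hello" }

      // Solvers keyed by the ClassTag of the trait they implement.
      val solverMap = new mutable.HashMap[ClassTag[_], Any]()
      solverMap(classTag[GreetingSolver]) = new EnglishGreeting

      // Look up by requested trait; the cast is safe as long as registration respected the key.
      def getSolver[S: ClassTag]: Option[S] =
        solverMap.get(classTag[S]).map(_.asInstanceOf[S])

      println(getSolver[GreetingSolver].map(_.greet()))  // Some(hello)
    }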

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DecompositionsSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DecompositionsSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DecompositionsSuite.scala
deleted file mode 100644
index 8f5ec99..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DecompositionsSuite.scala
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.decompositions
-
-import org.scalatest.FunSuite
-import org.apache.mahout.test.MahoutSuite
-import org.apache.mahout.common.RandomUtils
-import org.apache.mahout.math._
-import scalabindings._
-import RLikeOps._
-
-/**
- * This suite tests only in-core decompositions.
- * <P>
- *
- * We moved distributed tests into the mahout-spark module since they require concrete distributed
- * engine dependencies to run.
- * <P>
- */
-class DecompositionsSuite extends FunSuite with MahoutSuite {
-
-  test("ssvd") {
-
-    // Very naive; full-rank case only here.
-    val a = dense(
-      (1, 2, 3),
-      (3, 4, 5),
-      (-2, 6, 7),
-      (-3, 8, 9)
-    )
-
-    val rank = 2
-    val (u, v, s) = ssvd(a, k = rank, q = 1)
-
-    val (uControl, vControl, sControl) = svd(a)
-
-    printf("U:\n%s\n", u)
-    printf("U-control:\n%s\n", uControl)
-    printf("V:\n%s\n", v)
-    printf("V-control:\n%s\n", vControl)
-    printf("Sigma:\n%s\n", s)
-    printf("Sigma-control:\n%s\n", sControl)
-
-    (s - sControl(0 until rank)).norm(2) should be < 1E-7
-
-    // Singular vectors may be equal only up to a sign (see the sketch below).
-    (u.norm - uControl(::, 0 until rank).norm).abs should be < 1E-7
-    (v.norm - vControl(::, 0 until rank).norm).abs should be < 1E-7
-  }
-
-  test("spca") {
-
-    import math._
-
-    val rnd = RandomUtils.getRandom
-
-    // Number of points
-    val m = 500
-    // Length of actual spectrum
-    val spectrumLen = 40
-
-    val spectrum = dvec((0 until spectrumLen).map(x => 300.0 * exp(-x) max 1e-3))
-    printf("spectrum:%s\n", spectrum)
-
-    val (u, _) = qr(new SparseRowMatrix(m, spectrumLen) :=
-        ((r, c, v) => if (rnd.nextDouble() < 0.2) 0 else rnd.nextDouble() + 5.0))
-
-    // PCA Rotation matrix -- should also be orthonormal.
-    val (tr, _) = qr(Matrices.symmetricUniformView(spectrumLen, spectrumLen, rnd.nextInt) - 10.0)
-
-    val input = (u %*%: diagv(spectrum)) %*% tr.t
-
-    // Calculate just first 10 principal factors and reduce dimensionality.
-    // Since we assert just the validity of the s-pca, not the stochastic error, we bump the p parameter
-    // to drive the stochastic error essentially to zero and assert only the functional correctness of
-    // the method's pca-specific additions.
-    val k = 10
-    var (pca, _, s) = spca(a = input, k = k, p = spectrumLen, q = 1)
-    printf("Svs:%s\n", s)
-    // Un-normalized pca data:
-    pca = pca %*%: diagv(s)
-
-    // Of course, once we have calculated the pca, the spectrum is going to differ since our originally
-    // generated input was not centered. So here we just brute-solve the pca to verify.
-    val xi = input.colMeans()
-    for (r <- 0 until input.nrow) input(r, ::) -= xi
-    var (pcaControl, _, sControl) = svd(m = input)
-
-    printf("Svs-control:%s\n", sControl)
-    pcaControl = (pcaControl %*%: diagv(sControl))(::, 0 until k)
-
-    printf("pca:\n%s\n", pca(0 until 10, 0 until 10))
-    printf("pcaControl:\n%s\n", pcaControl(0 until 10, 0 until 10))
-
-    (pca(0 until 10, 0 until 10).norm - pcaControl(0 until 10, 0 until 10).norm).abs should be < 1E-5
-  }
-
-}
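
The ssvd comparison above can only compare norms because singular vectors are determined only up to a sign flip. A sketch of a slightly tighter check that could sit inside the ssvd test (it assumes u, uControl and rank from that test and is not an assertion the suite actually makes): compare matching columns via the absolute value of their normalized dot product, which is ~1 when the vectors agree up to sign.

    // Sketch, inside the "ssvd" test above: columns should agree up to a sign.
    for (col <- 0 until rank) {
      val cosine = (u(::, col) dot uControl(::, col)) /
        (u(::, col).norm(2) * uControl(::, col).norm(2))
      cosine.abs should be > (1.0 - 1e-6)
    }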

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DistributedDecompositionsSuiteBase.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DistributedDecompositionsSuiteBase.scala b/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DistributedDecompositionsSuiteBase.scala
deleted file mode 100644
index de8228e..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/decompositions/DistributedDecompositionsSuiteBase.scala
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.decompositions
-
-import org.apache.mahout.test.DistributedMahoutSuite
-import org.apache.mahout.math._
-import scalabindings._
-import RLikeOps._
-import drm._
-import RLikeDrmOps._
-import org.scalatest.{FunSuite, Matchers}
-import org.apache.mahout.common.RandomUtils
-import math._
-
-/**
- * ==Common distributed decomposition tests to run against each supported distributed engine.==
- *
- * Each distributed engine's decompositions package should have a suite that mixes in this trait
- * as part of its distributed test suite (a minimal sketch below shows such a mix-in).
- *
- */
-trait DistributedDecompositionsSuiteBase extends DistributedMahoutSuite with Matchers { this:FunSuite =>
-
-
-  test("thin distributed qr") {
-
-    val inCoreA = dense(
-      (1, 2, 3, 4),
-      (2, 3, 4, 5),
-      (3, -4, 5, 6),
-      (4, 5, 6, 7),
-      (8, 6, 7, 8)
-    )
-
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-    val (drmQ, inCoreR) = dqrThin(drmA, checkRankDeficiency = false)
-
-    // Assert optimizer still knows Q and A are identically partitioned
-    drmQ.partitioningTag should equal(drmA.partitioningTag)
-
-//    drmQ.rdd.partitions.size should be(A.rdd.partitions.size)
-//
-//    // Should also be zippable
-//    drmQ.rdd.zip(other = A.rdd)
-
-    val inCoreQ = drmQ.collect
-
-    printf("A=\n%s\n", inCoreA)
-    printf("Q=\n%s\n", inCoreQ)
-    printf("R=\n%s\n", inCoreR)
-
-    val (qControl, rControl) = qr(inCoreA)
-    printf("qControl=\n%s\n", qControl)
-    printf("rControl=\n%s\n", rControl)
-
-    // Validate with Cholesky
-    val ch = chol(inCoreA.t %*% inCoreA)
-    printf("A'A=\n%s\n", inCoreA.t %*% inCoreA)
-    printf("L:\n%s\n", ch.getL)
-
-    val rControl2 = (ch.getL cloned).t
-    val qControl2 = ch.solveRight(inCoreA)
-    printf("qControl2=\n%s\n", qControl2)
-    printf("rControl2=\n%s\n", rControl2)
-
-    // Householder approach seems to be a little bit more stable
-    (rControl - inCoreR).norm should be < 1E-5
-    (qControl - inCoreQ).norm should be < 1E-5
-
-    // Assert agreement with the in-core Cholesky-based result -- this should be tighter.
-    (rControl2 - inCoreR).norm should be < 1E-10
-    (qControl2 - inCoreQ).norm should be < 1E-10
-
-    // Assert orthogonality:
-    // (a) Q[,j] dot Q[,j] == 1.0 for all j
-    // (b) Q[,i] dot Q[,j] == 0.0 for all i != j
-    for (col <- 0 until inCoreQ.ncol)
-      ((inCoreQ(::, col) dot inCoreQ(::, col)) - 1.0).abs should be < 1e-10
-    for (col1 <- 0 until inCoreQ.ncol - 1; col2 <- col1 + 1 until inCoreQ.ncol)
-      (inCoreQ(::, col1) dot inCoreQ(::, col2)).abs should be < 1e-10
-
-
-  }
-
-  test("dssvd - the naive-est - q=0") {
-    dssvdNaive(q = 0)
-  }
-
-  test("ddsvd - naive - q=1") {
-    dssvdNaive(q = 1)
-  }
-
-  test("ddsvd - naive - q=2") {
-    dssvdNaive(q = 2)
-  }
-
-
-  def dssvdNaive(q: Int): Unit = {
-    val inCoreA = dense(
-      (1, 2, 3, 4),
-      (2, 3, 4, 5),
-      (3, -4, 5, 6),
-      (4, 5, 6, 7),
-      (8, 6, 7, 8)
-    )
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    val (drmU, drmV, s) = dssvd(drmA, k = 4, q = q)
-    val (inCoreU, inCoreV) = (drmU.collect, drmV.collect)
-
-    printf("U:\n%s\n", inCoreU)
-    printf("V:\n%s\n", inCoreV)
-    printf("Sigma:\n%s\n", s)
-
-    (inCoreA - (inCoreU %*%: diagv(s)) %*% inCoreV.t).norm should be < 1E-5
-  }
-
-  test("dspca") {
-
-    val rnd = RandomUtils.getRandom
-
-    // Number of points
-    val m = 500
-    // Length of actual spectrum
-    val spectrumLen = 40
-
-    val spectrum = dvec((0 until spectrumLen).map(x => 300.0 * exp(-x) max 1e-3))
-    printf("spectrum:%s\n", spectrum)
-
-    val (u, _) = qr(new SparseRowMatrix(m, spectrumLen) :=
-        ((r, c, v) => if (rnd.nextDouble() < 0.2) 0 else rnd.nextDouble() + 5.0))
-
-    // PCA Rotation matrix -- should also be orthonormal.
-    val (tr, _) = qr(Matrices.symmetricUniformView(spectrumLen, spectrumLen, rnd.nextInt) - 10.0)
-
-    val input = (u %*%: diagv(spectrum)) %*% tr.t
-    val drmInput = drmParallelize(m = input, numPartitions = 2)
-
-    // Calculate just first 10 principal factors and reduce dimensionality.
-    // Since we assert just the validity of the s-pca, not the stochastic error, we bump the p parameter
-    // to drive the stochastic error essentially to zero and assert only the functional correctness of
-    // the method's pca-specific additions.
-    val k = 10
-
-    // Calculate just first 10 principal factors and reduce dimensionality.
-    var (drmPCA, _, s) = dspca(drmA = drmInput, k = 10, p = spectrumLen, q = 1)
-    // Un-normalized pca data:
-    drmPCA = drmPCA %*% diagv(s)
-
-    val pca = drmPCA.checkpoint(CacheHint.NONE).collect
-
-    // Of course, once we have calculated the pca, the spectrum is going to differ since our originally
-    // generated input was not centered. So here we just brute-solve the pca to verify.
-    val xi = input.colMeans()
-    for (r <- 0 until input.nrow) input(r, ::) -= xi
-    var (pcaControl, _, sControl) = svd(m = input)
-    pcaControl = (pcaControl %*%: diagv(sControl))(::, 0 until k)
-
-    printf("pca:\n%s\n", pca(0 until 10, 0 until 10))
-    printf("pcaControl:\n%s\n", pcaControl(0 until 10, 0 until 10))
-
-    (pca(0 until 10, 0 until 10).norm - pcaControl(0 until 10, 0 until 10).norm).abs should be < 1E-5
-
-  }
-
-  test("dals") {
-
-    val rnd = RandomUtils.getRandom
-
-    // Number of points
-    val m = 500
-    val n = 500
-
-    // Length of actual spectrum
-    val spectrumLen = 40
-
-    // Create singular values with decay
-    val spectrum = dvec((0 until spectrumLen).map(x => 300.0 * exp(-x) max 1e-3))
-    printf("spectrum:%s\n", spectrum)
-
-    // Create A as an ideal input
-    val inCoreA = (qr(Matrices.symmetricUniformView(m, spectrumLen, 1234))._1 %*%: diagv(spectrum)) %*%
-        qr(Matrices.symmetricUniformView(n, spectrumLen, 2345))._1.t
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    // Decompose using ALS
-    val (drmU, drmV, rmse) = dals(drmA = drmA, k = 20).toTuple
-    val inCoreU = drmU.collect
-    val inCoreV = drmV.collect
-
-    val predict = inCoreU %*% inCoreV.t
-
-    printf("Control block:\n%s\n", inCoreA(0 until 3, 0 until 3))
-    printf("ALS factorized approximation block:\n%s\n", predict(0 until 3, 0 until 3))
-
-    val err = (inCoreA - predict).norm
-    printf("norm of residuals %f\n", err)
-    printf("train iteration rmses: %s\n", rmse)
-
-    err should be < 15e-2
-
-  }
-
-}
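
As the doc comment above says, each engine module is expected to mix this trait into its own distributed test suite. A minimal sketch of such a mix-in (MyEngineDistributedSuite is a stand-in for whatever trait wires up the concrete engine's distributed context; each engine module provides its own):

    import org.apache.mahout.math.decompositions.DistributedDecompositionsSuiteBase
    import org.scalatest.FunSuite

    // Runs every test defined in DistributedDecompositionsSuiteBase on this engine.
    class DistributedDecompositionsSuite extends FunSuite
        with MyEngineDistributedSuite
        with DistributedDecompositionsSuiteBase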

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeOpsSuiteBase.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeOpsSuiteBase.scala b/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeOpsSuiteBase.scala
deleted file mode 100644
index 525da11..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeOpsSuiteBase.scala
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.drm
-
-import org.apache.mahout.test.DistributedMahoutSuite
-import org.scalatest.{FunSuite, Matchers}
-import org.apache.mahout.math._
-import scalabindings._
-import RLikeOps._
-import RLikeDrmOps._
-
-import scala.reflect.{ClassTag,classTag}
-
-/** Common tests for DrmLike operators to be executed by all distributed engines. */
-trait DrmLikeOpsSuiteBase extends DistributedMahoutSuite with Matchers {
-  this: FunSuite ⇒
-
-  test("mapBlock") {
-
-    val inCoreA = dense((1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6))
-    val A = drmParallelize(m = inCoreA, numPartitions = 2)
-    val B = A.mapBlock(/* Inherit width */) {
-      case (keys, block) ⇒ keys → (block += 1.0)
-    }
-
-    val inCoreB = B.collect
-    val inCoreBControl = inCoreA + 1.0
-
-    println(inCoreB)
-
-    // Assert they are the same
-    (inCoreB - inCoreBControl).norm should be < 1E-10
-    B.keyClassTag shouldBe ClassTag.Int
-
-  }
-
-  test ("mapBlock implicit keying") {
-
-    val inCoreA = dense((1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6))
-    val A = drmParallelize(m = inCoreA, numPartitions = 2)
-    val B = A.mapBlock(/* Inherit width */) {
-      case (keys, block) ⇒ keys.map { k ⇒ k.toString } → block
-    }
-
-    B.keyClassTag shouldBe classTag[String]
-
-  }
-
-
-  test("allReduceBlock") {
-
-    val mxA = dense((1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6))
-    val drmA = drmParallelize(mxA, numPartitions = 2)
-
-    try {
-      val mxB = drmA.allreduceBlock { case (keys, block) ⇒
-        block(::, 0 until 2).t %*% block(::, 2 until 3)
-      }
-
-      val mxControl = mxA(::, 0 until 2).t %*% mxA(::, 2 until 3)
-
-      (mxB - mxControl).norm should be < 1e-10
-
-    } catch {
-      case e: UnsupportedOperationException ⇒ // Some engines may not support this, so ignore.
-    }
-
-  }
-
-  test("col range") {
-    val inCoreA = dense((1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6))
-    val A = drmParallelize(m = inCoreA, numPartitions = 2)
-    val B = A(::, 1 to 2)
-    val inCoreB = B.collect
-    val inCoreBControl = inCoreA(::, 1 to 2)
-
-    println(inCoreB)
-
-    // Assert they are the same
-    (inCoreB - inCoreBControl).norm should be < 1E-10
-
-  }
-
-  test("row range") {
-
-    val inCoreA = dense((1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6))
-    val A = drmParallelize(m = inCoreA, numPartitions = 2)
-    val B = A(1 to 2, ::)
-    val inCoreB = B.collect
-    val inCoreBControl = inCoreA(1 to 2, ::)
-
-    println(inCoreB)
-
-    // Assert they are the same
-    (inCoreB - inCoreBControl).norm should be < 1E-10
-
-  }
-
-  test("col, row range") {
-
-    val inCoreA = dense((1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6))
-    val A = drmParallelize(m = inCoreA, numPartitions = 2)
-    val B = A(1 to 2, 1 to 2)
-    val inCoreB = B.collect
-    val inCoreBControl = inCoreA(1 to 2, 1 to 2)
-
-    println(inCoreB)
-
-    // Assert they are the same
-    (inCoreB - inCoreBControl).norm should be < 1E-10
-
-  }
-
-  test("dsqDist(X,Y)") {
-    val m = 100
-    val n = 300
-    val d = 7
-    val mxX = Matrices.symmetricUniformView(m, d, 12345).cloned -= 5
-    val mxY = Matrices.symmetricUniformView(n, d, 1234).cloned += 10
-    val (drmX, drmY) = (drmParallelize(mxX, 3), drmParallelize(mxY, 4))
-
-    val mxDsq = dsqDist(drmX, drmY).collect
-    val mxDsqControl = new DenseMatrix(m, n) := { (r, c, _) ⇒ (mxX(r, ::) - mxY(c, ::)) ^= 2 sum }
-    (mxDsq - mxDsqControl).norm should be < 1e-7
-  }
-
-  test("dsqDist(X)") {
-    val m = 100
-    val d = 7
-    val mxX = Matrices.symmetricUniformView(m, d, 12345).cloned -= 5
-    val drmX = drmParallelize(mxX, 3)
-
-    val mxDsq = dsqDist(drmX).collect
-    val mxDsqControl = sqDist(drmX)
-    (mxDsq - mxDsqControl).norm should be < 1e-7
-  }
-
-}
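
The dsqDist controls above materialize each pairwise squared distance directly. The same quantities can be checked through the identity ||x - y||^2 = x.x + y.y - 2(x.y); a sketch that could sit at the end of the dsqDist(X,Y) test (it verifies the identity against mxDsqControl and says nothing about how dsqDist is implemented internally):

    // Sketch, inside the "dsqDist(X,Y)" test above: verify the expansion against the direct control.
    val mxDsqExpanded = new DenseMatrix(m, n) := { (r, c, _) =>
      (mxX(r, ::) dot mxX(r, ::)) + (mxY(c, ::) dot mxY(c, ::)) - 2.0 * (mxX(r, ::) dot mxY(c, ::))
    }
    (mxDsqExpanded - mxDsqControl).norm should be < 1e-7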

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeSuiteBase.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeSuiteBase.scala b/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeSuiteBase.scala
deleted file mode 100644
index 41814d8..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/drm/DrmLikeSuiteBase.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.drm
-
-import org.apache.mahout.test.DistributedMahoutSuite
-import org.scalatest.{FunSuite, Matchers}
-import org.apache.mahout.math._
-import scalabindings._
-import RLikeOps._
-import scala.reflect.ClassTag
-
-/** Common DRM tests to be run by all distributed engines. */
-trait DrmLikeSuiteBase extends DistributedMahoutSuite with Matchers {
-  this: FunSuite =>
-
-  test("DRM DFS i/o (local)") {
-
-    val uploadPath = TmpDir + "UploadedDRM"
-
-    val inCoreA = dense((1, 2, 3), (3, 4, 5))
-    val drmA = drmParallelize(inCoreA)
-
-    drmA.dfsWrite(path = uploadPath)
-
-    println(inCoreA)
-
-    // Load back from hdfs
-    val drmB = drmDfsRead(path = uploadPath)
-
-    // Make sure keys are correctly identified as ints
-    drmB.checkpoint(CacheHint.NONE).keyClassTag shouldBe ClassTag.Int
-
-    // Collect back into in-core
-    val inCoreB = drmB.collect
-
-    // Print out to see what it is we collected:
-    println(inCoreB)
-
-    (inCoreA - inCoreB).norm should be < 1e-7
-  }
-
-  test("DRM parallelizeEmpty") {
-
-    val drmEmpty = drmParallelizeEmpty(100, 50)
-
-    // collect back into in-core
-    val inCoreEmpty = drmEmpty.collect
-
-    inCoreEmpty.sum.abs should be < 1e-7
-    drmEmpty.nrow shouldBe 100
-    drmEmpty.ncol shouldBe 50
-    inCoreEmpty.nrow shouldBe 100
-    inCoreEmpty.ncol shouldBe 50
-
-  }
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/drm/RLikeDrmOpsSuiteBase.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/drm/RLikeDrmOpsSuiteBase.scala b/math-scala/src/test/scala/org/apache/mahout/math/drm/RLikeDrmOpsSuiteBase.scala
deleted file mode 100644
index 5d6d142..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/drm/RLikeDrmOpsSuiteBase.scala
+++ /dev/null
@@ -1,655 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.drm
-
-import org.apache.mahout.test.DistributedMahoutSuite
-import org.scalatest.{FunSuite, Matchers}
-import org.apache.mahout.math._
-import scalabindings._
-import RLikeOps._
-import RLikeDrmOps._
-import decompositions._
-import org.apache.mahout.math.drm.logical._
-import org.apache.mahout.math.drm.logical.OpAtx
-import org.apache.mahout.math.drm.logical.OpAtB
-import org.apache.mahout.math.drm.logical.OpAtA
-import org.apache.mahout.math.drm.logical.OpAewUnaryFuncFusion
-
-import scala.util.Random
-
-/** Common engine tests for distributed R-like DRM operations */
-trait RLikeDrmOpsSuiteBase extends DistributedMahoutSuite with Matchers {
-  this: FunSuite =>
-
-  val epsilon = 1E-5
-
-  test("A.t") {
-
-    val inCoreA = dense((1, 2, 3), (3, 4, 5))
-
-    val A = drmParallelize(inCoreA)
-
-    val inCoreAt = A.t.collect
-
-    // Assert the norm of the difference is less than the error margin.
-    (inCoreAt - inCoreA.t).norm should be < epsilon
-
-  }
-
-  test("C = A %*% B") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-
-    // Actual
-    val inCoreCControl = inCoreA %*% inCoreB
-
-    // Distributed operation
-    val C = A %*% B
-    val inCoreC = C.collect
-    println(inCoreC)
-
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-
-    // We also should be able to collect via implicit checkpoint
-    val inCoreC2 = C.collect
-    println(inCoreC2)
-
-    (inCoreC2 - inCoreCControl).norm should be < 1E-10
-
-  }
-
-  test("C = A %*% B mapBlock {}") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2).checkpoint()
-    val B = drmParallelize(inCoreB, numPartitions = 2).checkpoint()
-
-    // Actual
-    val inCoreCControl = inCoreA %*% inCoreB
-
-    A.colSums()
-    B.colSums()
-
-
-    val x = drmBroadcast(dvec(0, 0))
-    val x2 = drmBroadcast(dvec(0, 0))
-    // Distributed operation
-    val C = (B.t %*% A.t).t.mapBlock() {
-      case (keys, block) =>
-        for (row <- 0 until block.nrow) block(row, ::) += x.value + x2
-        keys -> block
-    }
-
-    val inCoreC = C checkpoint CacheHint.NONE collect;
-    println(inCoreC)
-
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-
-    // We also should be able to collect via implicit checkpoint
-    val inCoreC2 = C.collect
-    println(inCoreC2)
-
-    (inCoreC2 - inCoreCControl).norm should be < 1E-10
-
-    val inCoreQ = dqrThin(C)._1.collect
-
-    printf("Q=\n%s\n", inCoreQ)
-
-    // Assert unit-orthogonality
-    ((inCoreQ(::, 0) dot inCoreQ(::, 0)) - 1.0).abs should be < 1e-10
-    (inCoreQ(::, 0) dot inCoreQ(::, 1)).abs should be < 1e-10
-
-  }
-
-  test("C = A %*% B incompatible B keys") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-        // Re-key B into DrmLike[String] instead of [Int]
-        .mapBlock()({
-      case (keys, block) => keys.map(_.toString) -> block
-    })
-
-    val C = A %*% B
-
-    intercept[IllegalArgumentException] {
-      // This plan must not compile
-      C.checkpoint()
-    }
-  }
-
-  test("Spark-specific C = At %*% B , join") {
-
-    val inCoreA = dense((1, 2), (3, 4), (-3, -5))
-    val inCoreB = dense((3, 5), (4, 6), (0, 1))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-
-    val C = A.t %*% B
-
-    mahoutCtx.optimizerRewrite(C) should equal(OpAtB[Int](A, B))
-
-    val inCoreC = C.collect
-    val inCoreControlC = inCoreA.t %*% inCoreB
-
-    (inCoreC - inCoreControlC).norm should be < 1E-10
-
-  }
-
-
-  test("C = At %*% B , join, String-keyed") {
-
-    val inCoreA = dense((1, 2), (3, 4), (-3, -5))
-    val inCoreB = dense((3, 5), (4, 6), (0, 1))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-        .mapBlock()({
-      case (keys, block) => keys.map(_.toString) -> block
-    })
-
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-        .mapBlock()({
-      case (keys, block) => keys.map(_.toString) -> block
-    })
-
-    val C = A.t %*% B
-
-    mahoutCtx.optimizerRewrite(C) should equal(OpAtB[String](A, B))
-
-    val inCoreC = C.collect
-    val inCoreControlC = inCoreA.t %*% inCoreB
-
-    (inCoreC - inCoreControlC).norm should be < 1E-10
-
-  }
-
-  test("C = At %*% B , zippable, String-keyed") {
-
-    val inCoreA = dense((1, 2), (3, 4), (-3, -5))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-        .mapBlock()({
-      case (keys, block) ⇒ keys.map(_.toString) → block
-    })
-
-    // Dense A' times sparse B used to produce an error. We sparsify B here to test this as well.
-    val B = (A + 1.0).mapBlock() { case (keys, block) ⇒
-      keys → (new SparseRowMatrix(block.nrow, block.ncol) := block)
-    }
-
-    val C = A.t %*% B
-
-    mahoutCtx.optimizerRewrite(C) should equal(OpAtB[String](A, B))
-
-    val inCoreC = C.collect
-    val inCoreControlC = inCoreA.t %*% (inCoreA + 1.0)
-
-    (inCoreC - inCoreControlC).norm should be < 1E-10
-
-  }
-
-  test ("C = A %*% B.t") {
-
-    val inCoreA = dense((1, 2), (3, 4), (-3, -5))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-
-    val B = A + 1.0
-
-    val C = A %*% B.t
-
-    mahoutCtx.optimizerRewrite(C) should equal(OpABt[Int](A, B))
-
-    val inCoreC = C.collect
-    val inCoreControlC = inCoreA %*% (inCoreA + 1.0).t
-
-    (inCoreC - inCoreControlC).norm should be < 1E-10
-
-  }
-
-  test("C = A %*% inCoreB") {
-
-    val inCoreA = dense((1, 2, 3), (3, 4, 5), (4, 5, 6), (5, 6, 7))
-    val inCoreB = dense((3, 5, 7, 10), (4, 6, 9, 10), (5, 6, 7, 7))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-    val C = A %*% inCoreB
-
-    val inCoreC = C.collect
-    val inCoreCControl = inCoreA %*% inCoreB
-
-    println(inCoreC)
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-
-  }
-
-  test("C = inCoreA %*%: B") {
-
-    val inCoreA = dense((1, 2, 3), (3, 4, 5), (4, 5, 6), (5, 6, 7))
-    val inCoreB = dense((3, 5, 7, 10), (4, 6, 9, 10), (5, 6, 7, 7))
-
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-    val C = inCoreA %*%: B
-
-    val inCoreC = C.collect
-    val inCoreCControl = inCoreA %*% inCoreB
-
-    println(inCoreC)
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-
-  }
-
-  test("C = A.t %*% A") {
-    val inCoreA = dense((1, 2, 3), (3, 4, 5), (4, 5, 6), (5, 6, 7))
-    val A = drmParallelize(m = inCoreA, numPartitions = 2)
-
-    val AtA = A.t %*% A
-
-    // Assert optimizer detects square
-    mahoutCtx.optimizerRewrite(action = AtA) should equal(OpAtA(A))
-
-    val inCoreAtA = AtA.collect
-    val inCoreAtAControl = inCoreA.t %*% inCoreA
-
-    (inCoreAtA - inCoreAtAControl).norm should be < 1E-10
-  }
-
-  test("C = A.t %*% A fat non-graph") {
-    // Hack the max in-mem size for this test
-    System.setProperty("mahout.math.AtA.maxInMemNCol", "540")
-
-    val inCoreA = Matrices.uniformView(400, 550, 1234)
-    val A = drmParallelize(m = inCoreA, numPartitions = 2)
-
-    val AtA = A.t %*% A
-
-    // Assert optimizer detects square
-    mahoutCtx.optimizerRewrite(action = AtA) should equal(OpAtA(A))
-
-    val inCoreAtA = AtA.collect
-    val inCoreAtAControl = inCoreA.t %*% inCoreA
-
-    (inCoreAtA - inCoreAtAControl).norm should be < 1E-10
-  }
-
-  test("C = A.t %*% A non-int key") {
-    val inCoreA = dense((1, 2, 3), (3, 4, 5), (4, 5, 6), (5, 6, 7))
-    val AintKeyd = drmParallelize(m = inCoreA, numPartitions = 2)
-    val A = AintKeyd.mapBlock() {
-      case (keys, block) => keys.map(_.toString) -> block
-    }
-
-    val AtA = A.t %*% A
-
-    // Assert optimizer detects square
-    mahoutCtx.optimizerRewrite(action = AtA) should equal(OpAtA(A))
-
-    val inCoreAtA = AtA.collect
-    val inCoreAtAControl = inCoreA.t %*% inCoreA
-
-    (inCoreAtA - inCoreAtAControl).norm should be < 1E-10
-  }
-
-  test("C = A + B") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-
-    val C = A + B
-    val inCoreC = C.collect
-
-    // Actual
-    val inCoreCControl = inCoreA + inCoreB
-
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-  }
-
-  test("C = A + B, identically partitioned") {
-
-    val inCoreA = dense((1, 2, 3), (3, 4, 5), (5, 6, 7))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-
-//    printf("A.nrow=%d.\n", A.rdd.count())
-
-    // Create B which would be identically partitioned to A. mapBlock() by default will do the trick.
-    val B = A.mapBlock() {
-      case (keys, block) =>
-        val bBlock = block.like() := { (r, c, v) => util.Random.nextDouble()}
-        keys -> bBlock
-    }
-        // Prevent non-determinism from repeated computation:
-        // removing this checkpoint() causes the same error in the Spark tests
-        // as we see in Flink with this test, i.e. util.Random.nextDouble()
-        // is called more than once (note that it is not seeded in the closure;
-        // a seeded alternative is sketched below).
-        .checkpoint()
-
-    val inCoreB = B.collect
-
-    printf("A=\n%s\n", inCoreA)
-    printf("B=\n%s\n", inCoreB)
-
-    val C = A + B
-
-    val inCoreC = C.collect
-
-    printf("C=\n%s\n", inCoreC)
-
-    // Actual
-    val inCoreCControl = inCoreA + inCoreB
-
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-  }
-
-
-  test("C = A + B side test 1") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2)
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-
-    val C = A + B
-    val inCoreC = C.collect
-
-    val inCoreD = (A + B).collect
-
-    // Actual
-    val inCoreCControl = inCoreA + inCoreB
-
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-    (inCoreD - inCoreCControl).norm should be < 1E-10
-  }
-
-  test("C = A + B side test 2") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2).checkpoint()
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-
-    val C = A + B
-    val inCoreC = C.collect
-
-    val inCoreD = (A + B).collect
-
-    // Actual
-    val inCoreCControl = inCoreA + inCoreB
-
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-    (inCoreD - inCoreCControl).norm should be < 1E-10
-  }
-
-  test("C = A + B side test 3") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-
-    val B = drmParallelize(inCoreB, numPartitions = 2)
-    //    val A = (drmParallelize(inCoreA, numPartitions = 2) + B).checkpoint(CacheHint.MEMORY_ONLY_SER)
-    val A = (drmParallelize(inCoreA, numPartitions = 2) + B).checkpoint(CacheHint.MEMORY_ONLY)
-
-    val C = A + B
-    val inCoreC = C.collect
-
-    val inCoreD = (A + B).collect
-
-    // Actual
-    val inCoreCControl = inCoreA + inCoreB * 2.0
-
-    (inCoreC - inCoreCControl).norm should be < 1E-10
-    (inCoreD - inCoreCControl).norm should be < 1E-10
-  }
-
-  test("Ax") {
-    val inCoreA = dense(
-      (1, 2),
-      (3, 4),
-      (20, 30)
-    )
-    val x = dvec(10, 3)
-
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    val ax = (drmA %*% x).collect(::, 0)
-
-    ax should equal(inCoreA %*% x)
-  }
-
-  test("A'x") {
-    val inCoreA = dense(
-      (1, 2),
-      (3, 4),
-      (20, 30)
-    )
-    val x = dvec(10, 3, 4)
-
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    mahoutCtx.optimizerRewrite(drmA.t %*% x) should equal(OpAtx(drmA, x))
-
-    val atx = (drmA.t %*% x).collect(::, 0)
-
-    atx should equal(inCoreA.t %*% x)
-  }
-
-  test("colSums, colMeans") {
-    val inCoreA = dense(
-      (1, 2),
-      (3, 4),
-      (20, 30)
-    )
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    drmA.colSums() should equal(inCoreA.colSums())
-    drmA.colMeans() should equal(inCoreA.colMeans())
-  }
-
-  test("rowSums, rowMeans") {
-    val inCoreA = dense(
-      (1, 2),
-      (3, 4),
-      (20, 30)
-    )
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    drmA.rowSums() should equal(inCoreA.rowSums())
-    drmA.rowMeans() should equal(inCoreA.rowMeans())
-  }
-
-  test("A.diagv") {
-    val inCoreA = dense(
-      (1, 2, 3),
-      (3, 4, 5),
-      (20, 30, 7)
-    )
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    drmA.diagv should equal(inCoreA.diagv)
-  }
-
-  test("numNonZeroElementsPerColumn") {
-    val inCoreA = dense(
-      (0, 2),
-      (3, 0),
-      (0, -30)
-
-    )
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    drmA.numNonZeroElementsPerColumn() should equal(inCoreA.numNonZeroElementsPerColumn())
-  }
-
-  test("C = A cbind B, cogroup") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val inCoreB = dense((3, 5), (4, 6))
-    val controlC = dense((1, 2, 3, 5), (3, 4, 4, 6))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2).checkpoint()
-    val B = drmParallelize(inCoreB, numPartitions = 2).checkpoint()
-
-    (A.cbind(B) -: controlC).norm should be < 1e-10
-
-  }
-
-  test("C = A cbind B, zip") {
-
-    val inCoreA = dense((1, 2), (3, 4))
-    val controlC = dense((1, 2, 2, 3), (3, 4, 4, 5))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2).checkpoint()
-
-    (A.cbind(A + 1.0) -: controlC).norm should be < 1e-10
-
-  }
-
-  test("B = 1 cbind A") {
-    val inCoreA = dense((1, 2), (3, 4))
-    val control = dense((1, 1, 2), (1, 3, 4))
-
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    (control - (1 cbind drmA) ).norm should be < 1e-10
-  }
-
-  test("B = A cbind 1") {
-    val inCoreA = dense((1, 2), (3, 4))
-    val control = dense((1, 2, 1), (3, 4, 1))
-
-    val drmA = drmParallelize(inCoreA, numPartitions = 2)
-
-    (control - (drmA cbind 1) ).norm should be < 1e-10
-  }
-
-  test("B = A + 1.0") {
-    val inCoreA = dense((1, 2), (2, 3), (3, 4))
-    val controlB = inCoreA + 1.0
-
-    val drmB = drmParallelize(m = inCoreA, numPartitions = 2) + 1.0
-
-    (drmB -: controlB).norm should be < 1e-10
-  }
-  
-  test("C = A rbind B") {
-
-    val inCoreA = dense((1, 2), (3, 5))
-    val inCoreB = dense((7, 11), (13, 17))
-    val controlC = dense((1, 2), (3, 5), (7, 11), (13, 17))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2).checkpoint()
-    val B = drmParallelize(inCoreB, numPartitions = 2).checkpoint()
-    
-    (A.rbind(B) -: controlC).norm should be < 1e-10
-  }
-
-  test("C = A rbind B, with empty") {
-
-    val inCoreA = dense((1, 2), (3, 5))
-    val emptyB = drmParallelizeEmpty(nrow = 2, ncol = 2, numPartitions = 2)
-    val controlC = dense((1, 2), (3, 5), (0, 0), (0, 0))
-
-    val A = drmParallelize(inCoreA, numPartitions = 2).checkpoint()
-
-    (A.rbind(emptyB) -: controlC).norm should be < 1e-10
-  }
-
-  /** Test DSL overloads of scalar operations on matrices. */
-  test("scalarOps") {
-    val drmA = drmParallelize(m = dense(
-      (1, 2, 3),
-      (3, 4, 5),
-      (7, 8, 9)
-    ),
-      numPartitions = 2)
-
-    (10 * drmA - (10 *: drmA)).norm shouldBe 0
-
-  }
-
-  test("A * A -> sqr(A) rewrite ") {
-    val mxA = dense(
-      (1, 2, 3),
-      (3, 4, 5),
-      (7, 8, 9)
-    )
-
-    val mxAAControl = mxA * mxA
-
-    val drmA = drmParallelize(mxA, 2)
-    val drmAA = drmA * drmA
-
-    val optimized = drmAA.context.engine.optimizerRewrite(drmAA)
-    println(s"optimized:$optimized")
-    optimized.isInstanceOf[OpAewUnaryFunc[Int]] shouldBe true
-
-    (mxAAControl -= drmAA).norm should be < 1e-10
-  }
-
-  test("B = 1 + 2 * (A * A) ew unary function fusion") {
-    val mxA = dense(
-      (1, 2, 3),
-      (3, 0, 5)
-    )
-    val controlB = mxA.cloned := { (x) => 1 + 2 * x * x}
-
-    val drmA = drmParallelize(mxA, 2)
-
-    // We need to use parentheses, otherwise the optimizer will see it as (2A) * (A) and that would not
-    // be rewritten as 2 * sqr(A). It is not that clever (yet) about trying commutativity optimizations.
-    val drmB = 1 + 2 * (drmA * drmA)
-
-    val optimized = mahoutCtx.engine.optimizerRewrite(drmB)
-    println(s"optimizer rewritten:$optimized")
-    optimized.isInstanceOf[OpAewUnaryFuncFusion[Int]] shouldBe true
-
-    (controlB - drmB).norm should be < 1e-10
-
-  }
-
-  test("functional apply()") {
-    val mxA = sparse (
-      (1 -> 3) :: (7 -> 7) :: Nil,
-      (4 -> 5) :: (5 -> 8) :: Nil
-    )
-
-    val mxAControl = mxA cloned
-    val drmA = drmParallelize(mxA)
-
-    (drmA(x => x + 1).collect - (mxAControl + 1)).norm should be < 1e-7
-    (drmA(x => x * 2).collect - (2 * mxAControl)).norm should be < 1e-7
-
-  }
-
-
-}
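
The "identically partitioned" test above works around closure re-evaluation by checkpointing B, because util.Random.nextDouble() is not seeded inside the mapBlock closure. A sketch of the seeded alternative hinted at in that comment (per-block seed derived from the block's first key, so re-evaluation reproduces the same B; an illustration only, not what the suite does):

    // Deterministic B without relying on checkpoint(): seed the RNG per block.
    val B = A.mapBlock() {
      case (keys, block) =>
        val rng = new scala.util.Random(keys.headOption.map(_.hashCode).getOrElse(0))
        val bBlock = block.like() := { (r, c, v) => rng.nextDouble() }
        keys -> bBlock
    }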

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MahoutCollectionsSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MahoutCollectionsSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MahoutCollectionsSuite.scala
deleted file mode 100644
index cf62eea..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MahoutCollectionsSuite.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.scalabindings
-
-import org.apache.mahout.math.Vector
-import org.apache.mahout.test.MahoutSuite
-import org.scalatest.FunSuite
-import org.apache.mahout.math.scalabindings.MahoutCollections._
-import org.apache.mahout.math._
-import org.apache.mahout.math.scalabindings.RLikeOps._
-
-class MahoutCollectionsSuite extends FunSuite with MahoutSuite {
-  test("toArray") {
-    val a = Array(1.0, 2.0, 3.0)
-    val v: Vector = new org.apache.mahout.math.DenseVector(a)
-
-    v.toArray.deep shouldBe a.deep
-
-  }
-
-  test("toMap") {
-    val m = Map( (1 -> 1.0), (3 -> 3.0))
-    val sv = svec(m)
-
-    sv.toMap shouldBe m
-  }
-}

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MathSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MathSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MathSuite.scala
deleted file mode 100644
index 9e93e63..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MathSuite.scala
+++ /dev/null
@@ -1,267 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.scalabindings
-
-import org.apache.log4j.Level
-
-import org.apache.mahout.logging._
-import org.apache.mahout.math._
-import org.apache.mahout.math.scalabindings.RLikeOps._
-import org.apache.mahout.test.MahoutSuite
-import org.scalatest.FunSuite
-
-import scala.math._
-
-class MathSuite extends FunSuite with MahoutSuite {
-
-  private final implicit val log = getLog(classOf[MathSuite])
-
-  test("chol") {
-
-    // try to solve Ax=b with cholesky:
-    // this requires
-    // (LL')x = B
-    // L'x= (L^-1)B
-    // x=(L'^-1)(L^-1)B
-
-    val a = dense((1, 2, 3), (2, 3, 4), (3, 4, 5.5))
-
-    // make sure it is symmetric for a valid solution
-    a := a.t %*% a
-
-    trace(s"A= \n$a")
-
-    val b = dense((9, 8, 7)).t
-
-    trace(s"b = \n$b")
-
-    // Fails if chol(a, true)
-    val ch = chol(a)
-
-    trace(s"L = \n${ch.getL}")
-
-    trace(s"(L^-1)b =\n${ch.solveLeft(b)}\n")
-
-    val x = ch.solveRight(eye(3)) %*% ch.solveLeft(b)
-
-    trace(s"x = \n$x")
-
-    val axmb = (a %*% x) - b
-
-    trace(s"AX - B = \n$axmb")
-
-    axmb.norm should be < 1e-10
-
-  }
-
-  test("chol2") {
-
-    val vtv = new DenseSymmetricMatrix(
-      Array(
-        0.0021401286568947376, 0.001309251254596442, 0.0016003218703045058,
-        0.001545407014131058, 0.0012772546647977234,
-        0.001747768702674435
-      ), true)
-
-    printf("V'V=\n%s\n", vtv cloned)
-
-    val vblock = dense(
-      (0.0012356809018514347, 0.006141139195280868, 8.037742467936037E-4),
-      (0.007910767859830255, 0.007989899899005457, 0.006877961936587515),
-      (0.007011211118759952, 0.007458865101641882, 0.0048344749320346795),
-      (0.006578789899685284, 0.0010812485516549452, 0.0062146270886981655)
-    )
-
-    val d = diag(15.0, 4)
-
-
-    val b = dense(
-      0.36378319648203084,
-      0.3627384439613304,
-      0.2996934112658234)
-
-    printf("B=\n%s\n", b)
-
-
-    val cholArg = vtv + (vblock.t %*% d %*% vblock) + diag(4e-6, 3)
-
-    printf("cholArg=\n%s\n", cholArg)
-
-    printf("V'DV=\n%s\n", vblock.t %*% d %*% vblock)
-
-    printf("V'V+V'DV=\n%s\n", vtv + (vblock.t %*% d %*% vblock))
-
-    val ch = chol(cholArg)
-
-    printf("L=\n%s\n", ch.getL)
-
-    val x = ch.solveRight(eye(cholArg.nrow)) %*% ch.solveLeft(b)
-
-    printf("X=\n%s\n", x)
-
-    assert((cholArg %*% x - b).norm < 1e-10)
-
-  }
-
-  test("qr") {
-    val a = dense((1, 2, 3), (2, 3, 6), (3, 4, 5), (4, 7, 8))
-    val (q, r) = qr(a)
-
-    printf("Q=\n%s\n", q)
-    printf("R=\n%s\n", r)
-
-    for (i <- 0 until q.ncol; j <- i + 1 until q.ncol)
-      assert(abs(q(::, i) dot q(::, j)) < 1e-10)
-  }
-
-  test("solve matrix-vector") {
-    val a = dense((1, 3), (4, 2))
-    val b = dvec(11, 14)
-    val x = solve(a, b)
-
-    val control = dvec(2, 3)
-
-    (control - x).norm(2) should be < 1e-10
-  }
-
-  test("solve matrix-matrix") {
-    val a = dense((1, 3), (4, 2))
-    val b = dense(11, 14)
-    val x = solve(a, b)
-
-    val control = dense(2, 3)
-
-    (control - x).norm should be < 1e-10
-  }
-
-  test("solve to obtain inverse") {
-    val a = dense((1, 3), (4, 2))
-    val x = solve(a)
-
-    val identity = a %*% x
-
-    val control = eye(identity.ncol)
-
-    (control - identity).norm should be < 1e-10
-  }
-
-  test("solve rejects non-square matrix") {
-    intercept[IllegalArgumentException] {
-      val a = dense((1, 2, 3), (4, 5, 6))
-      val b = dvec(1, 2)
-      solve(a, b)
-    }
-  }
-
-  test("solve rejects singular matrix") {
-    intercept[IllegalArgumentException] {
-      val a = dense((1, 2), (2 , 4))
-      val b = dvec(1, 2)
-      solve(a, b)
-    }
-  }
-
-  test("svd") {
-
-    val a = dense((1, 2, 3), (3, 4, 5))
-
-    val (u, v, s) = svd(a)
-
-    printf("U:\n%s\n", u.toString)
-    printf("V:\n%s\n", v.toString)
-    printf("Sigma:\n%s\n", s.toString)
-
-    val aBar = u %*% diagv(s) %*% v.t
-
-    val amab = a - aBar
-
-    printf("A-USV'=\n%s\n", amab.toString)
-
-    assert(amab.norm < 1e-10)
-
-  }
-
-  test("random uniform") {
-    val omega1 = Matrices.symmetricUniformView(2, 3, 1234)
-    val omega2 = Matrices.symmetricUniformView(2, 3, 1234)
-
-    val a = sparse(
-      0 -> 1 :: 1 -> 2 :: Nil,
-      0 -> 3 :: 1 -> 4 :: Nil,
-      0 -> 2 :: 1 -> 0.0 :: Nil
-    )
-
-    val block = a(0 to 0, ::).cloned
-    val block2 = a(1 to 1, ::).cloned
-
-    (block %*% omega1 - (a %*% omega2)(0 to 0, ::)).norm should be < 1e-7
-    (block2 %*% omega1 - (a %*% omega2)(1 to 1, ::)).norm should be < 1e-7
-
-  }
-
-  test("sqDist(X,Y)") {
-    val m = 100
-    val n = 300
-    val d = 7
-    val mxX = Matrices.symmetricUniformView(m, d, 12345).cloned -= 5
-    val mxY = Matrices.symmetricUniformView(n, d, 1234).cloned += 10
-
-    val mxDsq = sqDist(mxX, mxY)
-    val mxDsqControl = new DenseMatrix(m, n) := { (r, c, _) ⇒ (mxX(r, ::) - mxY(c, ::)) ^= 2 sum }
-    (mxDsq - mxDsqControl).norm should be < 1e-7
-  }
-
-  test("sqDist(X)") {
-    val m = 100
-    val d = 7
-    val mxX = Matrices.symmetricUniformView(m, d, 12345).cloned -= 5
-
-    val mxDsq = sqDist(mxX)
-    val mxDsqControl = sqDist(mxX, mxX)
-    (mxDsq - mxDsqControl).norm should be < 1e-7
-  }
-
-  test("sparsity analysis") {
-    setLogLevel(Level.DEBUG)
-
-    val m = 500
-    val n = 800
-    val mxA = new DenseMatrix(m, n)
-
-    densityAnalysis(mxA) shouldBe false
-    densityAnalysis(mxA, .5) shouldBe false
-    densityAnalysis(mxA + 1) shouldBe true
-    densityAnalysis(mxA + 1, .95) shouldBe true
-
-    for (i ← 0 until m by 5) mxA(i, ::) := 1
-    info(s"20% detected as dense?:${densityAnalysis(mxA)}")
-    mxA := 0
-
-    for (i ← 0 until m by 3) mxA(i, ::) := 1
-    info(s"33% detected as dense?:${densityAnalysis(mxA)}")
-    mxA := 0
-
-    for (i ← 0 until m by 4) mxA(i, ::) := 1
-    info(s"25% detected as dense?:${densityAnalysis(mxA)}")
-
-    for (i ← 0 until m by 2) mxA(i, ::) := 1
-    info(s"50% detected as dense?:${densityAnalysis(mxA)}")
-
-  }
-
-}

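[Editor's note] For readers browsing this archive, the removed `MathSuite` "chol" case exercised the Cholesky-based linear solve exposed by the scalabindings DSL. A minimal standalone sketch of that pattern follows; it assumes the `org.apache.mahout.math.scalabindings` package (with `dense`, `chol`, `eye` and the `RLikeOps` operators) is still on the classpath, and the object name `CholeskySolveSketch` is illustrative only.

    import org.apache.mahout.math.scalabindings._
    import org.apache.mahout.math.scalabindings.RLikeOps._

    object CholeskySolveSketch extends App {
      // Build a symmetric positive-definite system A x = b, as in the removed "chol" test.
      val a = dense((1, 2, 3), (2, 3, 4), (3, 4, 5.5))
      a := a.t %*% a                                   // A'A is symmetric positive-definite

      val b = dense((9, 8, 7)).t                       // column vector as a 3 x 1 matrix

      val ch = chol(a)                                 // factor A = L L'
      // x = (L')^-1 (L^-1) b via the two triangular solves
      val x = ch.solveRight(eye(3)) %*% ch.solveLeft(b)

      println(s"residual norm ||Ax - b|| = ${((a %*% x) - b).norm}")  // expected ~0
    }
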
http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatlabLikeMatrixOpsSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatlabLikeMatrixOpsSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatlabLikeMatrixOpsSuite.scala
deleted file mode 100644
index 547f710..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatlabLikeMatrixOpsSuite.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.scalabindings
-
-import org.scalatest.FunSuite
-import MatlabLikeOps._
-import scala.Predef._
-import org.apache.mahout.test.MahoutSuite
-
-class MatlabLikeMatrixOpsSuite extends FunSuite with MahoutSuite {
-
-  test("multiplication") {
-
-    val a = dense((1, 2, 3), (3, 4, 5))
-    val b = dense(1, 4, 5)
-    val m = a * b
-
-    assert(m(0, 0) == 24)
-    assert(m(1, 0) == 44)
-    println(m.toString)
-  }
-
-  test("Hadamard") {
-    val a = dense(
-      (1, 2, 3),
-      (3, 4, 5)
-    )
-    val b = dense(
-      (1, 1, 2),
-      (2, 1, 1)
-    )
-
-    val c = a *@ b
-
-    printf("C=\n%s\n", c)
-
-    assert(c(0, 0) == 1)
-    assert(c(1, 2) == 5)
-    println(c.toString)
-
-    val d = a *@ 5.0
-    assert(d(0, 0) == 5)
-    assert(d(1, 1) == 20)
-
-    a *@= b
-    assert(a(0, 0) == 1)
-    assert(a(1, 2) == 5)
-    println(a.toString)
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatrixOpsSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatrixOpsSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatrixOpsSuite.scala
deleted file mode 100644
index 1296d9e..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/MatrixOpsSuite.scala
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.scalabindings
-
-import org.scalatest.{Matchers, FunSuite}
-import RLikeOps._
-import scala._
-import org.apache.mahout.test.MahoutSuite
-import org.apache.mahout.math.{RandomAccessSparseVector, SequentialAccessSparseVector, Matrices}
-import org.apache.mahout.common.RandomUtils
-
-import scala.util.Random
-
-
-class MatrixOpsSuite extends FunSuite with MahoutSuite {
-
-  test("equivalence") {
-    val a = dense((1, 2, 3), (3, 4, 5))
-    val b = dense((1, 2, 3), (3, 4, 5))
-    val c = dense((1, 4, 3), (3, 4, 5))
-    assert(a === b)
-    assert(a !== c)
-  }
-
-  test("elementwise plus, minus") {
-    val a = dense((1, 2, 3), (3, 4, 5))
-    val b = dense((1, 1, 2), (2, 1, 1))
-
-    val c = a + b
-    assert(c(0, 0) == 2)
-    assert(c(1, 2) == 6)
-    println(c.toString)
-  }
-
-  test("matrix, vector slicing") {
-
-    val a = dense((1, 2, 3), (3, 4, 5))
-
-    assert(a(::, 0).sum == 4)
-    assert(a(1, ::).sum == 12)
-
-    assert(a(0 to 1, 1 to 2).sum == 14)
-
-    // assign to slice-vector
-    a(0, 0 to 1) :=(3, 5)
-    // or
-    a(0, 0 to 1) = (3, 5)
-
-    assert(a(0, ::).sum == 11)
-
-    println(a.toString)
-
-    // assign to a slice-matrix
-    a(0 to 1, 0 to 1) := dense((1, 1), (2, 2.5))
-
-    // or
-    a(0 to 1, 0 to 1) = dense((1, 1), (2, 2.5))
-
-    println(a)
-    println(a.sum)
-
-    val b = dense((1, 2, 3), (3, 4, 5))
-    b(0, ::) -= dvec(1, 2, 3)
-    println(b)
-    b(0, ::) should equal(dvec(0, 0, 0))
-
-  }
-
-  test("assignments") {
-
-    val a = dense((1, 2, 3), (3, 4, 5))
-
-    val b = a cloned
-
-    b(0, 0) = 2.0
-
-    printf("B=\n%s\n", b)
-
-    assert((b - a).norm - 1 < 1e-10)
-
-    val e = eye(5)
-
-    println(s"I(5)=\n$e")
-
-    a(0 to 1, 1 to 2) = dense((3, 2), (2, 3))
-    a(0 to 1, 1 to 2) := dense((3, 2), (2, 3))
-
-    println(s"a=$a")
-
-    a(0 to 1, 1 to 2) := { _ => 45}
-    println(s"a=$a")
-
-//    a(0 to 1, 1 to 2) ::= { _ => 44}
-    println(s"a=$a")
-
-    // Sparse assignment to a sparse block
-    val c = sparse(0 -> 1 :: Nil, 2 -> 2 :: Nil, 1 -> 5 :: Nil)
-    val d = c.cloned
-
-    println(s"d=$d")
-    d.ncol shouldBe 3
-
-    d(::, 1 to 2) ::= { _ => 4}
-    println(s"d=$d")
-    d(::, 1 to 2).sum shouldBe 8
-
-    d ::= {_ => 5}
-    d.sum shouldBe 15
-
-    val f = c.cloned.t
-    f ::= {_ => 6}
-    f.sum shouldBe 18
-
-    val g = c.cloned
-    g(::, 1 until g.nrow) ::= { x => if (x <= 0) 0.0 else 1.0}
-    g.sum shouldBe 3
-  }
-
-  test("functional apply()") {
-    val mxA = sparse (
-      (1 -> 3) :: (7 -> 7) :: Nil,
-      (4 -> 5) :: (5 -> 8) :: Nil
-    )
-    val mxAControl = mxA cloned
-
-    (mxA(x ⇒ x + 1) - (mxAControl + 1)).norm should be < 1e-7
-    (mxA(x ⇒ x * 2) - (2 * mxAControl)).norm should be < 1e-7
-
-  }
-
-  test("sparse") {
-
-    val a = sparse((1, 3) :: Nil,
-      (0, 2) ::(1, 2.5) :: Nil
-    )
-    println(a.toString)
-  }
-
-  test("colSums, rowSums, colMeans, rowMeans, numNonZeroElementsPerColumn") {
-    val a = dense(
-      (2, 3, 4),
-      (3, 4, 5)
-    )
-
-    a.colSums() should equal(dvec(5, 7, 9))
-    a.rowSums() should equal(dvec(9, 12))
-    a.colMeans() should equal(dvec(2.5, 3.5, 4.5))
-    a.rowMeans() should equal(dvec(3, 4))
-    a.numNonZeroElementsPerColumn() should equal(dvec(2,2,2))
-    a.numNonZeroElementsPerRow() should equal(dvec(3,3))
-
-  }
-
-  test("numNonZeroElementsPerColumn and Row") {
-    val a = dense(
-      (2, 3, 4),
-      (3, 4, 5),
-      (-5, 0, -1),
-      (0, 0, 1)
-    )
-
-    a.numNonZeroElementsPerColumn() should equal(dvec(3,2,4))
-    a.numNonZeroElementsPerRow() should equal(dvec(3,3,2,1))
-  }
-
-  test("Vector Assignment performance") {
-
-    val n = 1000
-    val k = (n * 0.1).toInt
-    val nIters = 10000
-
-    val rnd = RandomUtils.getRandom
-
-    val src = new SequentialAccessSparseVector(n)
-    for (i <- 0 until k) src(rnd.nextInt(n)) = rnd.nextDouble()
-
-    val times = (0 until 50).map { i =>
-      val ms = System.currentTimeMillis()
-      var j = 0
-      while (j < nIters) {
-        new SequentialAccessSparseVector(n) := src
-        j += 1
-      }
-      System.currentTimeMillis() - ms
-    }
-
-        .tail
-
-    val avgTime = times.sum.toDouble / times.size
-
-    printf("Average assignment seqSparse2seqSparse time: %.3f ms\n", avgTime)
-
-    val times2 = (0 until 50).map { i =>
-      val ms = System.currentTimeMillis()
-      var j = 0
-      while (j < nIters) {
-        new SequentialAccessSparseVector(n) := (new RandomAccessSparseVector(n) := src)
-        j += 1
-      }
-      System.currentTimeMillis() - ms
-    }
-
-        .tail
-
-    val avgTime2 = times2.sum.toDouble / times2.size
-
-    printf("Average assignment seqSparse2seqSparse via Random Access Sparse time: %.3f ms\n", avgTime2)
-
-  }
-
-
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeMatrixOpsSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeMatrixOpsSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeMatrixOpsSuite.scala
deleted file mode 100644
index 6dc8207..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeMatrixOpsSuite.scala
+++ /dev/null
@@ -1,369 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.scalabindings
-
-import java.util
-
-import org.apache.log4j.Level
-import org.apache.mahout.math._
-import org.scalatest.FunSuite
-import RLikeOps._
-import org.apache.mahout.test.MahoutSuite
-import org.apache.mahout.logging._
-import scala.collection.JavaConversions._
-import scala.util.Random
-
-class RLikeMatrixOpsSuite extends FunSuite with MahoutSuite {
-
-  test("multiplication") {
-
-    val a = dense((1, 2, 3), (3, 4, 5))
-    val b = dense(1, 4, 5)
-    val m = a %*% b
-
-    assert(m(0, 0) == 24)
-    assert(m(1, 0) == 44)
-    println(m.toString)
-  }
-
-  test("Hadamard") {
-    val a = dense(
-      (1, 2, 3),
-      (3, 4, 5)
-    )
-    val b = dense(
-      (1, 1, 2),
-      (2, 1, 1)
-    )
-
-    val c = a * b
-
-    printf("C=\n%s\n", c)
-
-    assert(c(0, 0) == 1)
-    assert(c(1, 2) == 5)
-    println(c.toString)
-
-    val d = a * 5.0
-    assert(d(0, 0) == 5)
-    assert(d(1, 1) == 20)
-
-    a *= b
-    assert(a(0, 0) == 1)
-    assert(a(1, 2) == 5)
-    println(a.toString)
-
-  }
-
-  test("Uniform view") {
-    val mxUnif = Matrices.symmetricUniformView(5000000, 5000000, 1234)
-  }
-
-  /** Test dsl overloads over scala operations over matrices */
-  test ("scalarOps") {
-    val a = dense(
-      (1, 2, 3),
-      (3, 4, 5)
-    )
-
-    (10 * a - (10 *: a)).norm shouldBe 0
-    (10 + a - (10 +: a)).norm shouldBe 0
-    (10 - a - (10 -: a)).norm shouldBe 0
-    (10 / a - (10 /: a)).norm shouldBe 0
-
-  }
-
-  test("Multiplication experimental performance") {
-
-    getLog(MMul.getClass).setLevel(Level.DEBUG)
-
-    val d = 300
-    val n = 3
-
-    // Dense row-wise
-    val mxAd = new DenseMatrix(d, d) := Matrices.gaussianView(d, d, 134) + 1
-    val mxBd = new DenseMatrix(d, d) := Matrices.gaussianView(d, d, 134) - 1
-
-    val rnd = new Random(1234)
-
-    // Sparse rows
-    val mxAsr = (new SparseRowMatrix(d,
-      d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() + 1 else 0.0 }) cloned
-    val mxBsr = (new SparseRowMatrix(d,
-      d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() - 1 else 0.0 }) cloned
-
-    // Hanging sparse rows
-    val mxAs = (new SparseMatrix(d, d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() + 1 else 0.0 }) cloned
-    val mxBs = (new SparseMatrix(d, d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() - 1 else 0.0 }) cloned
-
-    // DIAGONAL
-    val mxD = diagv(dvec(Array.tabulate(d)(_ => rnd.nextGaussian())))
-
-    def time(op: => Unit): Long = {
-      val ms = System.currentTimeMillis()
-      op
-      System.currentTimeMillis() - ms
-    }
-
-
-    // We're not using GPUMMul or OMPMMul in math-scala so dont need to worry about
-    // changing it in this method
-    def getMmulAvgs(mxA: Matrix, mxB: Matrix, n: Int) = {
-
-      var control: Matrix = null
-      var mmulVal: Matrix = null
-
-      val current = Stream.range(0, n).map { _ => time {control = mxA.times(mxB)} }.sum.toDouble / n
-      val experimental = Stream.range(0, n).map { _ => time {mmulVal = MMul(mxA, mxB, None)} }.sum.toDouble / n
-      (control - mmulVal).norm should be < 1e-10
-      current -> experimental
-    }
-
-    // Dense matrix tests.
-    println(s"Ad %*% Bd: ${getMmulAvgs(mxAd, mxBd, n)}")
-    println(s"Ad(::,::) %*% Bd: ${getMmulAvgs(mxAd(0 until mxAd.nrow,::), mxBd, n)}")
-    println(s"Ad' %*% Bd: ${getMmulAvgs(mxAd.t, mxBd, n)}")
-    println(s"Ad %*% Bd': ${getMmulAvgs(mxAd, mxBd.t, n)}")
-    println(s"Ad' %*% Bd': ${getMmulAvgs(mxAd.t, mxBd.t, n)}")
-    println(s"Ad'' %*% Bd'': ${getMmulAvgs(mxAd.t.t, mxBd.t.t, n)}")
-    println
-
-    // Sparse row matrix tests.
-    println(s"Asr %*% Bsr: ${getMmulAvgs(mxAsr, mxBsr, n)}")
-    println(s"Asr' %*% Bsr: ${getMmulAvgs(mxAsr.t, mxBsr, n)}")
-    println(s"Asr %*% Bsr': ${getMmulAvgs(mxAsr, mxBsr.t, n)}")
-    println(s"Asr' %*% Bsr': ${getMmulAvgs(mxAsr.t, mxBsr.t, n)}")
-    println(s"Asr'' %*% Bsr'': ${getMmulAvgs(mxAsr.t.t, mxBsr.t.t, n)}")
-    println
-
-    // Sparse matrix tests.
-    println(s"Asm %*% Bsm: ${getMmulAvgs(mxAs, mxBs, n)}")
-    println(s"Asm' %*% Bsm: ${getMmulAvgs(mxAs.t, mxBs, n)}")
-    println(s"Asm %*% Bsm': ${getMmulAvgs(mxAs, mxBs.t, n)}")
-    println(s"Asm' %*% Bsm': ${getMmulAvgs(mxAs.t, mxBs.t, n)}")
-    println(s"Asm'' %*% Bsm'': ${getMmulAvgs(mxAs.t.t, mxBs.t.t, n)}")
-    println
-
-    // Mixed sparse matrix tests.
-    println(s"Asm %*% Bsr: ${getMmulAvgs(mxAs, mxBsr, n)}")
-    println(s"Asm' %*% Bsr: ${getMmulAvgs(mxAs.t, mxBsr, n)}")
-    println(s"Asm %*% Bsr': ${getMmulAvgs(mxAs, mxBsr.t, n)}")
-    println(s"Asm' %*% Bsr': ${getMmulAvgs(mxAs.t, mxBsr.t, n)}")
-    println(s"Asm'' %*% Bsr'': ${getMmulAvgs(mxAs.t.t, mxBsr.t.t, n)}")
-    println
-
-    println(s"Asr %*% Bsm: ${getMmulAvgs(mxAsr, mxBs, n)}")
-    println(s"Asr' %*% Bsm: ${getMmulAvgs(mxAsr.t, mxBs, n)}")
-    println(s"Asr %*% Bsm': ${getMmulAvgs(mxAsr, mxBs.t, n)}")
-    println(s"Asr' %*% Bsm': ${getMmulAvgs(mxAsr.t, mxBs.t, n)}")
-    println(s"Asr'' %*% Bsm'': ${getMmulAvgs(mxAsr.t.t, mxBs.t.t, n)}")
-    println
-
-    // Mixed dense/sparse
-    println(s"Ad %*% Bsr: ${getMmulAvgs(mxAd, mxBsr, n)}")
-    println(s"Ad' %*% Bsr: ${getMmulAvgs(mxAd.t, mxBsr, n)}")
-    println(s"Ad %*% Bsr': ${getMmulAvgs(mxAd, mxBsr.t, n)}")
-    println(s"Ad' %*% Bsr': ${getMmulAvgs(mxAd.t, mxBsr.t, n)}")
-    println(s"Ad'' %*% Bsr'': ${getMmulAvgs(mxAd.t.t, mxBsr.t.t, n)}")
-    println
-
-    println(s"Asr %*% Bd: ${getMmulAvgs(mxAsr, mxBd, n)}")
-    println(s"Asr' %*% Bd: ${getMmulAvgs(mxAsr.t, mxBd, n)}")
-    println(s"Asr %*% Bd': ${getMmulAvgs(mxAsr, mxBd.t, n)}")
-    println(s"Asr' %*% Bd': ${getMmulAvgs(mxAsr.t, mxBd.t, n)}")
-    println(s"Asr'' %*% Bd'': ${getMmulAvgs(mxAsr.t.t, mxBd.t.t, n)}")
-    println
-
-    println(s"Ad %*% Bsm: ${getMmulAvgs(mxAd, mxBs, n)}")
-    println(s"Ad' %*% Bsm: ${getMmulAvgs(mxAd.t, mxBs, n)}")
-    println(s"Ad %*% Bsm': ${getMmulAvgs(mxAd, mxBs.t, n)}")
-    println(s"Ad' %*% Bsm': ${getMmulAvgs(mxAd.t, mxBs.t, n)}")
-    println(s"Ad'' %*% Bsm'': ${getMmulAvgs(mxAd.t.t, mxBs.t.t, n)}")
-    println
-
-    println(s"Asm %*% Bd: ${getMmulAvgs(mxAs, mxBd, n)}")
-    println(s"Asm' %*% Bd: ${getMmulAvgs(mxAs.t, mxBd, n)}")
-    println(s"Asm %*% Bd': ${getMmulAvgs(mxAs, mxBd.t, n)}")
-    println(s"Asm' %*% Bd': ${getMmulAvgs(mxAs.t, mxBd.t, n)}")
-    println(s"Asm'' %*% Bd'': ${getMmulAvgs(mxAs.t.t, mxBd.t.t, n)}")
-    println
-
-    // Diagonal cases
-    println(s"Ad %*% D: ${getMmulAvgs(mxAd, mxD, n)}")
-    println(s"Asr %*% D: ${getMmulAvgs(mxAsr, mxD, n)}")
-    println(s"Asm %*% D: ${getMmulAvgs(mxAs, mxD, n)}")
-    println(s"D %*% Ad: ${getMmulAvgs(mxD, mxAd, n)}")
-    println(s"D %*% Asr: ${getMmulAvgs(mxD, mxAsr, n)}")
-    println(s"D %*% Asm: ${getMmulAvgs(mxD, mxAs, n)}")
-    println
-
-    println(s"Ad' %*% D: ${getMmulAvgs(mxAd.t, mxD, n)}")
-    println(s"Asr' %*% D: ${getMmulAvgs(mxAsr.t, mxD, n)}")
-    println(s"Asm' %*% D: ${getMmulAvgs(mxAs.t, mxD, n)}")
-    println(s"D %*% Ad': ${getMmulAvgs(mxD, mxAd.t, n)}")
-    println(s"D %*% Asr': ${getMmulAvgs(mxD, mxAsr.t, n)}")
-    println(s"D %*% Asm': ${getMmulAvgs(mxD, mxAs.t, n)}")
-    println
-
-    // Self-squared cases
-    println(s"Ad %*% Ad': ${getMmulAvgs(mxAd, mxAd.t, n)}")
-    println(s"Ad' %*% Ad: ${getMmulAvgs(mxAd.t, mxAd, n)}")
-    println(s"Ad' %*% Ad'': ${getMmulAvgs(mxAd.t, mxAd.t.t, n)}")
-    println(s"Ad'' %*% Ad': ${getMmulAvgs(mxAd.t.t, mxAd.t, n)}")
-
-  }
-
-
-  test("elementwise experimental performance") {
-
-    val d = 500
-    val n = 3
-
-    // Dense row-wise
-    val mxAd = new DenseMatrix(d, d) := Matrices.gaussianView(d, d, 134) + 1
-    val mxBd = new DenseMatrix(d, d) := Matrices.gaussianView(d, d, 134) - 1
-
-    val rnd = new Random(1234)
-
-    // Sparse rows
-    val mxAsr = (new SparseRowMatrix(d,
-      d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() + 1 else 0.0 }) cloned
-    val mxBsr = (new SparseRowMatrix(d,
-      d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() - 1 else 0.0 }) cloned
-
-    // Hanging sparse rows
-    val mxAs = (new SparseMatrix(d, d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() + 1 else 0.0 }) cloned
-    val mxBs = (new SparseMatrix(d, d) := { _ => if (rnd.nextDouble() < 0.1) rnd.nextGaussian() - 1 else 0.0 }) cloned
-
-    // DIAGONAL
-    val mxD = diagv(dvec(Array.tabulate(d)(_ => rnd.nextGaussian())))
-
-    def time(op: => Unit): Long = {
-      val ms = System.currentTimeMillis()
-      op
-      System.currentTimeMillis() - ms
-    }
-
-    def getEWAvgs(mxA: Matrix, mxB: Matrix, n: Int) = {
-
-      var control: Matrix = null
-      var mmulVal: Matrix = null
-
-      val current = Stream.range(0, n).map { _ => time {control = mxA + mxB} }.sum.toDouble / n
-      val experimental = Stream.range(0, n).map { _ => time {mmulVal = mxA + mxB} }.sum.toDouble / n
-      (control - mmulVal).norm should be < 1e-10
-      current -> experimental
-    }
-
-    // Dense matrix tests.
-    println(s"Ad + Bd: ${getEWAvgs(mxAd, mxBd, n)}")
-    println(s"Ad' + Bd: ${getEWAvgs(mxAd.t, mxBd, n)}")
-    println(s"Ad + Bd': ${getEWAvgs(mxAd, mxBd.t, n)}")
-    println(s"Ad' + Bd': ${getEWAvgs(mxAd.t, mxBd.t, n)}")
-    println(s"Ad'' + Bd'': ${getEWAvgs(mxAd.t.t, mxBd.t.t, n)}")
-    println
-
-    // Sparse row matrix tests.
-    println(s"Asr + Bsr: ${getEWAvgs(mxAsr, mxBsr, n)}")
-    println(s"Asr' + Bsr: ${getEWAvgs(mxAsr.t, mxBsr, n)}")
-    println(s"Asr + Bsr': ${getEWAvgs(mxAsr, mxBsr.t, n)}")
-    println(s"Asr' + Bsr': ${getEWAvgs(mxAsr.t, mxBsr.t, n)}")
-    println(s"Asr'' + Bsr'': ${getEWAvgs(mxAsr.t.t, mxBsr.t.t, n)}")
-    println
-
-    // Sparse matrix tests.
-    println(s"Asm + Bsm: ${getEWAvgs(mxAs, mxBs, n)}")
-    println(s"Asm' + Bsm: ${getEWAvgs(mxAs.t, mxBs, n)}")
-    println(s"Asm + Bsm': ${getEWAvgs(mxAs, mxBs.t, n)}")
-    println(s"Asm' + Bsm': ${getEWAvgs(mxAs.t, mxBs.t, n)}")
-    println(s"Asm'' + Bsm'': ${getEWAvgs(mxAs.t.t, mxBs.t.t, n)}")
-    println
-
-    // Mixed sparse matrix tests.
-    println(s"Asm + Bsr: ${getEWAvgs(mxAs, mxBsr, n)}")
-    println(s"Asm' + Bsr: ${getEWAvgs(mxAs.t, mxBsr, n)}")
-    println(s"Asm + Bsr': ${getEWAvgs(mxAs, mxBsr.t, n)}")
-    println(s"Asm' + Bsr': ${getEWAvgs(mxAs.t, mxBsr.t, n)}")
-    println(s"Asm'' + Bsr'': ${getEWAvgs(mxAs.t.t, mxBsr.t.t, n)}")
-    println
-
-    println(s"Asr + Bsm: ${getEWAvgs(mxAsr, mxBs, n)}")
-    println(s"Asr' + Bsm: ${getEWAvgs(mxAsr.t, mxBs, n)}")
-    println(s"Asr + Bsm': ${getEWAvgs(mxAsr, mxBs.t, n)}")
-    println(s"Asr' + Bsm': ${getEWAvgs(mxAsr.t, mxBs.t, n)}")
-    println(s"Asr'' + Bsm'': ${getEWAvgs(mxAsr.t.t, mxBs.t.t, n)}")
-    println
-
-    // Mixed dense/sparse
-    println(s"Ad + Bsr: ${getEWAvgs(mxAd, mxBsr, n)}")
-    println(s"Ad' + Bsr: ${getEWAvgs(mxAd.t, mxBsr, n)}")
-    println(s"Ad + Bsr': ${getEWAvgs(mxAd, mxBsr.t, n)}")
-    println(s"Ad' + Bsr': ${getEWAvgs(mxAd.t, mxBsr.t, n)}")
-    println(s"Ad'' + Bsr'': ${getEWAvgs(mxAd.t.t, mxBsr.t.t, n)}")
-    println
-
-    println(s"Asr + Bd: ${getEWAvgs(mxAsr, mxBd, n)}")
-    println(s"Asr' + Bd: ${getEWAvgs(mxAsr.t, mxBd, n)}")
-    println(s"Asr + Bd': ${getEWAvgs(mxAsr, mxBd.t, n)}")
-    println(s"Asr' + Bd': ${getEWAvgs(mxAsr.t, mxBd.t, n)}")
-    println(s"Asr'' + Bd'': ${getEWAvgs(mxAsr.t.t, mxBd.t.t, n)}")
-    println
-
-    println(s"Ad + Bsm: ${getEWAvgs(mxAd, mxBs, n)}")
-    println(s"Ad' + Bsm: ${getEWAvgs(mxAd.t, mxBs, n)}")
-    println(s"Ad + Bsm': ${getEWAvgs(mxAd, mxBs.t, n)}")
-    println(s"Ad' + Bsm': ${getEWAvgs(mxAd.t, mxBs.t, n)}")
-    println(s"Ad'' + Bsm'': ${getEWAvgs(mxAd.t.t, mxBs.t.t, n)}")
-    println
-
-    println(s"Asm + Bd: ${getEWAvgs(mxAs, mxBd, n)}")
-    println(s"Asm' + Bd: ${getEWAvgs(mxAs.t, mxBd, n)}")
-    println(s"Asm + Bd': ${getEWAvgs(mxAs, mxBd.t, n)}")
-    println(s"Asm' + Bd': ${getEWAvgs(mxAs.t, mxBd.t, n)}")
-    println(s"Asm'' + Bd'': ${getEWAvgs(mxAs.t.t, mxBd.t.t, n)}")
-    println
-
-    // Diagonal cases
-    println(s"Ad + D: ${getEWAvgs(mxAd, mxD, n)}")
-    println(s"Asr + D: ${getEWAvgs(mxAsr, mxD, n)}")
-    println(s"Asm + D: ${getEWAvgs(mxAs, mxD, n)}")
-    println(s"D + Ad: ${getEWAvgs(mxD, mxAd, n)}")
-    println(s"D + Asr: ${getEWAvgs(mxD, mxAsr, n)}")
-    println(s"D + Asm: ${getEWAvgs(mxD, mxAs, n)}")
-    println
-
-    println(s"Ad' + D: ${getEWAvgs(mxAd.t, mxD, n)}")
-    println(s"Asr' + D: ${getEWAvgs(mxAsr.t, mxD, n)}")
-    println(s"Asm' + D: ${getEWAvgs(mxAs.t, mxD, n)}")
-    println(s"D + Ad': ${getEWAvgs(mxD, mxAd.t, n)}")
-    println(s"D + Asr': ${getEWAvgs(mxD, mxAsr.t, n)}")
-    println(s"D + Asm': ${getEWAvgs(mxD, mxAs.t, n)}")
-    println
-
-  }
-
-  test("dense-view-debug") {
-    val d = 500
-    // Dense row-wise
-    val mxAd = new DenseMatrix(d, d) := Matrices.gaussianView(d, d, 134) + 1
-    val mxBd = new DenseMatrix(d, d) := Matrices.gaussianView(d, d, 134) - 1
-
-    mxAd(0 until mxAd.nrow, ::) %*% mxBd
-
-  }
-}

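[Editor's note] The two operator suites removed above differed only in syntax: the Matlab-like DSL used `*` for the matrix product and `*@` for the Hadamard product, while the R-like DSL used `%*%` and `*`. A minimal sketch of the R-like forms, taken from the deleted `RLikeMatrixOpsSuite` cases (same classpath assumption as above; `RLikeOpsSketch` is an illustrative name):

    import org.apache.mahout.math.scalabindings._
    import org.apache.mahout.math.scalabindings.RLikeOps._

    object RLikeOpsSketch extends App {
      val a = dense((1, 2, 3), (3, 4, 5))
      val b = dense(1, 4, 5)               // three one-element rows, i.e. a 3 x 1 matrix

      val m = a %*% b                      // matrix product: (2 x 3) %*% (3 x 1)
      println(m)                           // m(0, 0) == 24, m(1, 0) == 44

      val h = a * dense((1, 1, 2), (2, 1, 1))  // Hadamard (element-wise) product
      println(h)                           // h(0, 0) == 1, h(1, 2) == 5
    }
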
http://git-wip-us.apache.org/repos/asf/mahout/blob/99a5358f/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeVectorOpsSuite.scala
----------------------------------------------------------------------
diff --git a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeVectorOpsSuite.scala b/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeVectorOpsSuite.scala
deleted file mode 100644
index f17f08a..0000000
--- a/math-scala/src/test/scala/org/apache/mahout/math/scalabindings/RLikeVectorOpsSuite.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.math.scalabindings
-
-import org.apache.log4j.{BasicConfigurator, Level}
-import org.apache.mahout.logging._
-import org.apache.mahout.math._
-import org.apache.mahout.math.scalabindings.RLikeOps._
-import org.apache.mahout.test.MahoutSuite
-import org.scalatest.FunSuite
-
-class RLikeVectorOpsSuite extends FunSuite with MahoutSuite {
-
-  BasicConfigurator.configure()
-  private[scalabindings] final implicit val log = getLog(classOf[RLikeVectorOpsSuite])
-  setLogLevel(Level.DEBUG)
-
-  test("Hadamard") {
-    val a: Vector = (1, 2, 3)
-    val b = (3, 4, 5)
-
-    val c = a * b
-    println(c)
-    assert(c ===(3, 8, 15))
-  }
-
-  test("dot-view performance") {
-
-    val dv1 = new DenseVector(500) := Matrices.uniformView(1, 500, 1234)(0, ::)
-    val dv2 = new DenseVector(500) := Matrices.uniformView(1, 500, 1244)(0, ::)
-
-    val nit = 300000
-
-    // warm up
-    dv1 dot dv2
-
-    val dmsStart = System.currentTimeMillis()
-    for (i ← 0 until nit)
-      dv1 dot dv2
-    val dmsMs = System.currentTimeMillis() - dmsStart
-
-    val (dvv1, dvv2) = dv1(0 until dv1.length) → dv2(0 until dv2.length)
-
-    // Warm up.
-    dvv1 dot dvv2
-
-    val dvmsStart = System.currentTimeMillis()
-    for (i ← 0 until nit)
-      dvv1 dot dvv2
-    val dvmsMs = System.currentTimeMillis() - dvmsStart
-
-    debug(f"dense vector dots:${dmsMs}%.2f ms.")
-    debug(f"dense view dots:${dvmsMs}%.2f ms.")
-
-  }
-
-}

