spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r...@apache.org
Subject [2/6] git commit: Since getLong() and getInt() have side effects, get back parentheses, and remove an empty line
Date Thu, 16 Jan 2014 04:15:46 GMT
Since getLong() and getInt() have side effects, get back parentheses, and remove an empty line


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/12386b3e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/12386b3e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/12386b3e

Branch: refs/heads/master
Commit: 12386b3eea5db7be002b4ba620f3e242bb8ef332
Parents: 0d94d74
Author: Frank Dai <soulmachine@gmail.com>
Authored: Tue Jan 14 14:53:10 2014 +0800
Committer: Frank Dai <soulmachine@gmail.com>
Committed: Tue Jan 14 14:53:10 2014 +0800

----------------------------------------------------------------------
 .../spark/mllib/api/python/PythonMLLibAPI.scala   | 18 +++++++++---------
 .../spark/mllib/clustering/KMeansModel.scala      |  1 -
 2 files changed, 9 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/12386b3e/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala b/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala
index 9ec6019..8520756 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala
@@ -36,11 +36,11 @@ class PythonMLLibAPI extends Serializable {
     }
     val bb = ByteBuffer.wrap(bytes)
     bb.order(ByteOrder.nativeOrder())
-    val magic = bb.getLong
+    val magic = bb.getLong()
     if (magic != 1) {
       throw new IllegalArgumentException("Magic " + magic + " is wrong.")
     }
-    val length = bb.getLong
+    val length = bb.getLong()
     if (packetLength != 16 + 8 * length) {
       throw new IllegalArgumentException("Length " + length + " is wrong.")
     }
@@ -69,12 +69,12 @@ class PythonMLLibAPI extends Serializable {
     }
     val bb = ByteBuffer.wrap(bytes)
     bb.order(ByteOrder.nativeOrder())
-    val magic = bb.getLong
+    val magic = bb.getLong()
     if (magic != 2) {
       throw new IllegalArgumentException("Magic " + magic + " is wrong.")
     }
-    val rows = bb.getLong
-    val cols = bb.getLong
+    val rows = bb.getLong()
+    val cols = bb.getLong()
     if (packetLength != 24 + 8 * rows * cols) {
       throw new IllegalArgumentException("Size " + rows + "x" + cols + " is wrong.")
     }
@@ -198,8 +198,8 @@ class PythonMLLibAPI extends Serializable {
   private def unpackRating(ratingBytes: Array[Byte]): Rating = {
     val bb = ByteBuffer.wrap(ratingBytes)
     bb.order(ByteOrder.nativeOrder())
-    val user = bb.getInt
-    val product = bb.getInt
+    val user = bb.getInt()
+    val product = bb.getInt()
     val rating = bb.getDouble
     new Rating(user, product, rating)
   }
@@ -208,8 +208,8 @@ class PythonMLLibAPI extends Serializable {
   private[spark] def unpackTuple(tupleBytes: Array[Byte]): (Int, Int) = {
     val bb = ByteBuffer.wrap(tupleBytes)
     bb.order(ByteOrder.nativeOrder())
-    val v1 = bb.getInt
-    val v2 = bb.getInt
+    val v1 = bb.getInt()
+    val v2 = bb.getInt()
     (v1, v2)
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/12386b3e/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
index f770707..980be93 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
@@ -38,6 +38,5 @@ class KMeansModel(val clusterCenters: Array[Array[Double]]) extends Serializable
    */
   def computeCost(data: RDD[Array[Double]]): Double = {
     data.map(p => KMeans.pointCost(clusterCenters, p)).sum()
-
   }
 }


Mime
View raw message