predictionio-commits mailing list archives

From apurt...@apache.org
Subject [42/50] [abbrv] incubator-predictionio git commit: Add a rule WhitespaceEndOfLineChecker
Date Mon, 27 Jun 2016 19:31:56 GMT
Add a rule WhitespaceEndOfLineChecker
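WhitespaceEndOfLineChecker is scalastyle's file-level check that reports any source line ending in whitespace. This commit enables it at level "error" in scalastyle-config.xml and removes the trailing whitespace it would otherwise flag across the code base. For reference, the check as declared in the scalastyle-config.xml hunk below is:

    <check level="error"
           class="org.scalastyle.file.WhitespaceEndOfLineChecker"
           enabled="true"/>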


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/7a40f737
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/7a40f737
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/7a40f737

Branch: refs/heads/master
Commit: 7a40f737f50fe67b4eb227e36c0dbbd41bae80cd
Parents: eb3fe34
Author: hyukjinkwon <gurwls223@gmail.com>
Authored: Sun Apr 3 14:31:08 2016 +0900
Committer: hyukjinkwon <gurwls223@gmail.com>
Committed: Sun Apr 3 14:31:08 2016 +0900

----------------------------------------------------------------------
 .../io/prediction/controller/Deployment.scala   |  2 +-
 .../scala/io/prediction/controller/Engine.scala |  4 +--
 .../io/prediction/controller/Evaluation.scala   | 10 +++---
 .../prediction/controller/FastEvalEngine.scala  | 38 ++++++++++----------
 .../io/prediction/controller/LServing.scala     |  2 +-
 .../scala/io/prediction/controller/Metric.scala | 14 ++++----
 .../prediction/controller/MetricEvaluator.scala | 14 ++++----
 .../io/prediction/controller/package.scala      |  2 +-
 .../scala/io/prediction/core/BaseEngine.scala   |  8 ++---
 .../io/prediction/core/BaseEvaluator.scala      |  6 ++--
 .../io/prediction/workflow/CoreWorkflow.scala   |  4 +--
 .../io/prediction/workflow/FakeWorkflow.scala   | 26 +++++++-------
 .../io/prediction/data/storage/BiMap.scala      |  2 +-
 .../data/storage/elasticsearch/ESChannels.scala |  2 +-
 .../data/storage/hbase/PIOHBaseUtil.scala       |  2 +-
 .../data/storage/jdbc/JDBCLEvents.scala         |  4 +--
 .../io/prediction/data/store/LEventStore.scala  |  2 +-
 .../webhooks/mailchimp/MailChimpConnector.scala | 22 ++++++------
 .../prediction/e2/engine/BinaryVectorizer.scala |  5 +--
 scalastyle-config.xml                           |  4 +++
 .../scala/io/prediction/tools/RunWorkflow.scala |  2 +-
 .../prediction/tools/admin/CommandClient.scala  |  2 +-
 .../tools/dashboard/CorsSupport.scala           | 16 ++++-----
 23 files changed, 99 insertions(+), 94 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/Deployment.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Deployment.scala b/core/src/main/scala/io/prediction/controller/Deployment.scala
index b9d7c08..49e14d5 100644
--- a/core/src/main/scala/io/prediction/controller/Deployment.scala
+++ b/core/src/main/scala/io/prediction/controller/Deployment.scala
@@ -34,7 +34,7 @@ trait Deployment extends EngineFactory {
   }
 
   /** Returns the [[Engine]] contained in this [[Deployment]]. */
-  private [prediction] 
+  private [prediction]
   def engine: BaseEngine[_, _, _, _] = {
     assert(engineSet, "Engine not set")
     _engine

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/Engine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Engine.scala b/core/src/main/scala/io/prediction/controller/Engine.scala
index 92d630f..bd37d8a 100644
--- a/core/src/main/scala/io/prediction/controller/Engine.scala
+++ b/core/src/main/scala/io/prediction/controller/Engine.scala
@@ -309,7 +309,7 @@ class Engine[TD, EI, PD, Q, P, A](
     *         result, and actual result tuple tuple.
     */
   def eval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     params: WorkflowParams)
   : Seq[(EI, RDD[(Q, P, A)])] = {
@@ -762,7 +762,7 @@ object Engine {
       algoMap.mapValues(_.trainBase(sc,pd))
     }}
 
-    val suppQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalQAsMap.mapValues { qas => 
+    val suppQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalQAsMap.mapValues { qas =>
       qas.map { case (qx, (q, a)) => (qx, (serving.supplementBase(q), a)) }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/Evaluation.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Evaluation.scala b/core/src/main/scala/io/prediction/controller/Evaluation.scala
index 8d22464..a6ee9a7 100644
--- a/core/src/main/scala/io/prediction/controller/Evaluation.scala
+++ b/core/src/main/scala/io/prediction/controller/Evaluation.scala
@@ -32,7 +32,7 @@ trait Evaluation extends Deployment {
   protected [this] var _evaluatorSet: Boolean = false
   protected [this] var _evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = _
 
-  private [prediction] 
+  private [prediction]
   def evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = {
     assert(_evaluatorSet, "Evaluator not set")
     _evaluator
@@ -60,7 +60,7 @@ trait Evaluation extends Deployment {
     */
   def engineEvaluator_=[EI, Q, P, A, R <: BaseEvaluatorResult](
     engineEvaluator: (
-      BaseEngine[EI, Q, P, A], 
+      BaseEngine[EI, Q, P, A],
       BaseEvaluator[EI, Q, P, A, R])) {
     assert(!_evaluatorSet, "Evaluator can be set at most once")
     engine = engineEvaluator._1
@@ -88,7 +88,7 @@ trait Evaluation extends Deployment {
   def engineMetric_=[EI, Q, P, A](
     engineMetric: (BaseEngine[EI, Q, P, A], Metric[EI, Q, P, A, _])) {
     engineEvaluator = (
-      engineMetric._1, 
+      engineMetric._1,
       MetricEvaluator(
         metric = engineMetric._2,
         otherMetrics = Seq[Metric[EI, Q, P, A, _]](),
@@ -112,8 +112,8 @@ trait Evaluation extends Deployment {
     */
   def engineMetrics_=[EI, Q, P, A](
     engineMetrics: (
-      BaseEngine[EI, Q, P, A], 
-      Metric[EI, Q, P, A, _], 
+      BaseEngine[EI, Q, P, A],
+      Metric[EI, Q, P, A, _],
       Seq[Metric[EI, Q, P, A, _]])) {
     engineEvaluator = (
       engineMetrics._1,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala b/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
index 5eb21b1..8e9727e 100644
--- a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
+++ b/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
@@ -42,11 +42,11 @@ import scala.collection.mutable.{ HashMap => MutableHashMap }
 @Experimental
 object FastEvalEngineWorkflow  {
   @transient lazy val logger = Logger[this.type]
-  
+
   type EX = Int
   type AX = Int
   type QX = Long
- 
+
   case class DataSourcePrefix(dataSourceParams: (String, Params)) {
     def this(pp: PreparatorPrefix) = this(pp.dataSourceParams)
     def this(ap: AlgorithmsPrefix) = this(ap.dataSourceParams)
@@ -60,7 +60,7 @@ object FastEvalEngineWorkflow  {
       this(ap.dataSourceParams, ap.preparatorParams)
     }
   }
-  
+
   case class AlgorithmsPrefix(
     dataSourceParams: (String, Params),
     preparatorParams: (String, Params),
@@ -90,7 +90,7 @@ object FastEvalEngineWorkflow  {
 
     if (!cache.contains(prefix)) {
       val dataSource = Doer(
-        workflow.engine.dataSourceClassMap(prefix.dataSourceParams._1), 
+        workflow.engine.dataSourceClassMap(prefix.dataSourceParams._1),
         prefix.dataSourceParams._2)
 
       val result = dataSource
@@ -130,7 +130,7 @@ object FastEvalEngineWorkflow  {
   def computeAlgorithmsResult[TD, EI, PD, Q, P, A](
     workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
     prefix: AlgorithmsPrefix): Map[EX, RDD[(QX, Seq[P])]] = {
-    
+
     val algoMap: Map[AX, BaseAlgorithm[PD, _, Q, P]] = prefix.algorithmParamsList
       .map { case (algoName, algoParams) => {
         try {
@@ -162,12 +162,12 @@ object FastEvalEngineWorkflow  {
     val algoModelsMap: Map[EX, Map[AX, Any]] = getPreparatorResult(
       workflow,
       new PreparatorPrefix(prefix))
-    .mapValues { 
+    .mapValues {
       pd => algoMap.mapValues(_.trainBase(workflow.sc,pd))
     }
 
     // Predict
-    val dataSourceResult = 
+    val dataSourceResult =
       FastEvalEngineWorkflow.getDataSourceResult(
         workflow = workflow,
         prefix = new DataSourcePrefix(prefix))
@@ -177,22 +177,22 @@ object FastEvalEngineWorkflow  {
     .map { case (ex, (td, ei, iqaRDD)) => {
       val modelsMap: Map[AX, Any] = algoModelsMap(ex)
       val qs: RDD[(QX, Q)] = iqaRDD.mapValues(_._1)
-  
+
       val algoPredicts: Seq[RDD[(QX, (AX, P))]] = (0 until algoCount)
       .map { ax => {
         val algo = algoMap(ax)
         val model = modelsMap(ax)
         val rawPredicts: RDD[(QX, P)] = algo.batchPredictBase(
-          workflow.sc, 
+          workflow.sc,
           model,
           qs)
-    
-        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map { 
+
+        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map {
           case (qx, p) => (qx, (ax, p))
         }
         predicts
       }}
-        
+
       val unionAlgoPredicts: RDD[(QX, Seq[P])] = workflow.sc
       .union(algoPredicts)
       .groupByKey
@@ -205,7 +205,7 @@ object FastEvalEngineWorkflow  {
     }}
     .seq
     .toMap
-    
+
     algoResult
   }
 
@@ -262,13 +262,13 @@ object FastEvalEngineWorkflow  {
     }
     cache(prefix)
   }
-  
+
   def get[TD, EI, PD, Q, P, A](
     workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
     engineParamsList: Seq[EngineParams])
   : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
     engineParamsList.map { engineParams => {
-      (engineParams, 
+      (engineParams,
         getServingResult(workflow, new ServingPrefix(engineParams)))
     }}
   }
@@ -286,12 +286,12 @@ class FastEvalEngineWorkflow[TD, EI, PD, Q, P, A](
   val workflowParams: WorkflowParams) extends Serializable {
 
   import io.prediction.controller.FastEvalEngineWorkflow._
-  
+
   type DataSourceResult = Map[EX, (TD, EI, RDD[(QX, (Q, A))])]
   type PreparatorResult = Map[EX, PD]
   type AlgorithmsResult = Map[EX, RDD[(QX, Seq[P])]]
   type ServingResult = Seq[(EI, RDD[(Q, P, A)])]
-  
+
   val dataSourceCache = MutableHashMap[DataSourcePrefix, DataSourceResult]()
   val preparatorCache = MutableHashMap[PreparatorPrefix, PreparatorResult]()
   val algorithmsCache = MutableHashMap[AlgorithmsPrefix, AlgorithmsResult]()
@@ -320,8 +320,8 @@ class FastEvalEngine[TD, EI, PD, Q, P, A](
   @transient override lazy val logger = Logger[this.type]
 
   override def eval(
-    sc: SparkContext, 
-    engineParams: EngineParams, 
+    sc: SparkContext,
+    engineParams: EngineParams,
     params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])] = {
     logger.info("FastEvalEngine.eval")
     batchEval(sc, Seq(engineParams), params).head._2

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/LServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LServing.scala b/core/src/main/scala/io/prediction/controller/LServing.scala
index c14f3fd..accee48 100644
--- a/core/src/main/scala/io/prediction/controller/LServing.scala
+++ b/core/src/main/scala/io/prediction/controller/LServing.scala
@@ -18,7 +18,7 @@ package io.prediction.controller
 import io.prediction.annotation.Experimental
 import io.prediction.core.BaseServing
 
-/** Base class of serving. 
+/** Base class of serving.
   *
   * @tparam Q Input query class.
   * @tparam P Output prediction class.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/Metric.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Metric.scala b/core/src/main/scala/io/prediction/controller/Metric.scala
index 89ac490..9e56125 100644
--- a/core/src/main/scala/io/prediction/controller/Metric.scala
+++ b/core/src/main/scala/io/prediction/controller/Metric.scala
@@ -60,11 +60,11 @@ private [prediction] trait StatsMetricHelper[EI, Q, P, A] {
   def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : StatCounter = {
     val doubleRDD = sc.union(
-      evalDataSet.map { case (_, qpaRDD) => 
+      evalDataSet.map { case (_, qpaRDD) =>
         qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
       }
     )
-   
+
     doubleRDD.stats()
   }
 }
@@ -75,11 +75,11 @@ private [prediction] trait StatsOptionMetricHelper[EI, Q, P, A] {
   def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : StatCounter = {
     val doubleRDD = sc.union(
-      evalDataSet.map { case (_, qpaRDD) => 
+      evalDataSet.map { case (_, qpaRDD) =>
         qpaRDD.flatMap { case (q, p, a) => calculate(q, p, a) }
       }
     )
-   
+
     doubleRDD.stats()
   }
 }
@@ -119,7 +119,7 @@ abstract class AverageMetric[EI, Q, P, A]
   * @group Evaluation
   */
 abstract class OptionAverageMetric[EI, Q, P, A]
-    extends Metric[EI, Q, P, A, Double] 
+    extends Metric[EI, Q, P, A, Double]
     with StatsOptionMetricHelper[EI, Q, P, A]
     with QPAMetric[Q, P, A, Option[Double]] {
   /** Implement this method to return a score that will be used for averaging
@@ -189,7 +189,7 @@ abstract class OptionStdevMetric[EI, Q, P, A]
   }
 }
 
-/** Returns the sum of the score returned by the calculate method. 
+/** Returns the sum of the score returned by the calculate method.
   *
   * @tparam EI Evaluation information
   * @tparam Q Query
@@ -210,7 +210,7 @@ abstract class SumMetric[EI, Q, P, A, R: ClassTag](implicit num: Numeric[R])
   def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
   : R = {
     val union: RDD[R] = sc.union(
-      evalDataSet.map { case (_, qpaRDD) => 
+      evalDataSet.map { case (_, qpaRDD) =>
         qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
       }
     )

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala b/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
index e28f7ca..41ccc9c 100644
--- a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
+++ b/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
@@ -78,13 +78,13 @@ extends BaseEvaluatorResult {
       new NameParamsSerializer
     write(this)
   }
-  
+
   override def toHTML(): String = html.metric_evaluator().toString()
-  
+
   override def toString: String = {
     implicit lazy val formats = Utils.json4sDefaultFormats +
       new NameParamsSerializer
-    
+
     val bestEPStr = JsonExtractor.engineParamstoPrettyJson(Both, bestEngineParams)
 
     val strings = Seq(
@@ -130,7 +130,7 @@ object MetricEvaluator {
       otherMetrics,
       None)
   }
-  
+
   def apply[EI, Q, P, A, R](metric: Metric[EI, Q, P, A, R])
   : MetricEvaluator[EI, Q, P, A, R] = {
     new MetricEvaluator[EI, Q, P, A, R](
@@ -194,7 +194,7 @@ class MetricEvaluator[EI, Q, P, A, R] (
 
     val now = DateTime.now
     val evalClassName = evaluation.getClass.getName
-    
+
     val variant = MetricEvaluator.EngineVariant(
       id = s"$evalClassName $now",
       description = "",
@@ -221,14 +221,14 @@ class MetricEvaluator[EI, Q, P, A, R] (
     val evalResultList: Seq[(EngineParams, MetricScores[R])] = engineEvalDataSet
     .zipWithIndex
     .par
-    .map { case ((engineParams, evalDataSet), idx) => 
+    .map { case ((engineParams, evalDataSet), idx) =>
       val metricScores = MetricScores[R](
         metric.calculate(sc, evalDataSet),
         otherMetrics.map(_.calculate(sc, evalDataSet)))
       (engineParams, metricScores)
     }
     .seq
-    
+
     implicit lazy val formats = Utils.json4sDefaultFormats +
       new NameParamsSerializer
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/controller/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/package.scala b/core/src/main/scala/io/prediction/controller/package.scala
index b344d3e..bcb4b0d 100644
--- a/core/src/main/scala/io/prediction/controller/package.scala
+++ b/core/src/main/scala/io/prediction/controller/package.scala
@@ -119,7 +119,7 @@ package object controller {
     * @group Helper
     */
   type EmptyDataParams = EmptyParams
-  
+
   /** Empty evaluation info.
     * @group Helper
     */

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/core/BaseEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BaseEngine.scala b/core/src/main/scala/io/prediction/core/BaseEngine.scala
index 546e889..5356fa7 100644
--- a/core/src/main/scala/io/prediction/core/BaseEngine.scala
+++ b/core/src/main/scala/io/prediction/core/BaseEngine.scala
@@ -44,7 +44,7 @@ abstract class BaseEngine[EI, Q, P, A] extends Serializable {
     */
   @DeveloperApi
   def train(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     engineInstanceId: String,
     params: WorkflowParams): Seq[Any]
@@ -61,7 +61,7 @@ abstract class BaseEngine[EI, Q, P, A] extends Serializable {
     */
   @DeveloperApi
   def eval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])]
 
@@ -77,11 +77,11 @@ abstract class BaseEngine[EI, Q, P, A] extends Serializable {
     */
   @DeveloperApi
   def batchEval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParamsList: Seq[EngineParams],
     params: WorkflowParams)
   : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
-    engineParamsList.map { engineParams => 
+    engineParamsList.map { engineParams =>
       (engineParams, eval(sc, engineParams, params))
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala b/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
index 19eb8a5..23fe826 100644
--- a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
+++ b/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
@@ -57,10 +57,10 @@ abstract class BaseEvaluator[EI, Q, P, A, ER <: BaseEvaluatorResult]
 trait BaseEvaluatorResult extends Serializable {
   /** A short description of the result */
   def toOneLiner(): String = ""
-  
+
   /** HTML portion of the rendered evaluator results */
   def toHTML(): String = ""
-  
+
   /** JSON portion of the rendered evaluator results */
   def toJSON(): String = ""
 
@@ -68,5 +68,5 @@ trait BaseEvaluatorResult extends Serializable {
     * Indicate if this result is inserted into database
     */
   @Experimental
-  val noSave: Boolean = false 
+  val noSave: Boolean = false
 }

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala b/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
index 5ef6fb4..ad93b1a 100644
--- a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
@@ -72,7 +72,7 @@ object CoreWorkflow {
       val instanceId = Storage.getMetaDataEngineInstances
 
       val kryo = KryoInstantiator.newKryoInjection
-      
+
       logger.info("Inserting persistent model")
       Storage.getModelDataModels.insert(Model(
         id = engineInstance.id,
@@ -135,7 +135,7 @@ object CoreWorkflow {
       evaluator,
       params)
 
-    if (evaluatorResult.noSave) { 
+    if (evaluatorResult.noSave) {
       logger.info(s"This evaluation result is not inserted into database: $evaluatorResult")
     } else {
       val evaluatedEvaluationInstance = evaluationInstance.copy(

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala b/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
index ccb600f..350a430 100644
--- a/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
+++ b/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
@@ -15,7 +15,7 @@
 
 package io.prediction.workflow
 
-import io.prediction.annotation.Experimental   
+import io.prediction.annotation.Experimental
 // FIXME(yipjustin): Remove wildcard import.
 import io.prediction.core._
 import io.prediction.controller._
@@ -32,7 +32,7 @@ extends BaseEngine[EmptyParams, EmptyParams, EmptyParams, EmptyParams] {
   @transient lazy val logger = Logger[this.type]
 
   def train(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     engineInstanceId: String,
     params: WorkflowParams): Seq[Any] = {
@@ -40,7 +40,7 @@ extends BaseEngine[EmptyParams, EmptyParams, EmptyParams, EmptyParams] {
   }
 
   def eval(
-    sc: SparkContext, 
+    sc: SparkContext,
     engineParams: EngineParams,
     params: WorkflowParams)
   : Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])] = {
@@ -56,7 +56,7 @@ private[prediction] class FakeRunner(f: (SparkContext => Unit))
   def evaluateBase(
     sc: SparkContext,
     evaluation: Evaluation,
-    engineEvalDataSet: 
+    engineEvalDataSet:
         Seq[(EngineParams, Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])])],
     params: WorkflowParams): FakeEvalResult = {
     f(sc)
@@ -66,36 +66,36 @@ private[prediction] class FakeRunner(f: (SparkContext => Unit))
 
 @Experimental
 private[prediction] case class FakeEvalResult() extends BaseEvaluatorResult {
-  override val noSave: Boolean = true 
+  override val noSave: Boolean = true
 }
 
 /** FakeRun allows user to implement custom function under the exact enviroment
-  * as other PredictionIO workflow. 
+  * as other PredictionIO workflow.
   *
-  * Useful for developing new features. Only need to extend this trait and 
-  * implement a function: (SparkContext => Unit). For example, the code below 
+  * Useful for developing new features. Only need to extend this trait and
+  * implement a function: (SparkContext => Unit). For example, the code below
   * can be run with `pio eval HelloWorld`.
   *
   * {{{
   * object HelloWorld extends FakeRun {
   *   // func defines the function pio runs, must have signature (SparkContext => Unit).
   *   func = f
-  * 
+  *
   *   def f(sc: SparkContext): Unit {
   *     val logger = Logger[this.type]
   *     logger.info("HelloWorld")
   *   }
   * }
-  * }}} 
-  * 
+  * }}}
+  *
   */
 @Experimental
 trait FakeRun extends Evaluation with EngineParamsGenerator {
   private[this] var _runner: FakeRunner = _
 
   def runner: FakeRunner = _runner
-  def runner_=(r: FakeRunner) { 
-    engineEvaluator = (new FakeEngine(), r) 
+  def runner_=(r: FakeRunner) {
+    engineEvaluator = (new FakeEngine(), r)
     engineParamsList = Seq(new EngineParams())
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/data/src/main/scala/io/prediction/data/storage/BiMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/BiMap.scala b/data/src/main/scala/io/prediction/data/storage/BiMap.scala
index 9153782..cbf3e12 100644
--- a/data/src/main/scala/io/prediction/data/storage/BiMap.scala
+++ b/data/src/main/scala/io/prediction/data/storage/BiMap.scala
@@ -130,7 +130,7 @@ object BiMap {
     new BiMap(HashMap(ki : _*))
   }
 
-  /** Create a BiMap[String, Double] from a set of String. The Double index 
+  /** Create a BiMap[String, Double] from a set of String. The Double index
     * starts from 0.
     * @param keys a set of String
     * @return a String to Double BiMap

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
index 80fd06f..ee5e9e7 100644
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
+++ b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
@@ -12,7 +12,7 @@
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
-  
+
 package io.prediction.data.storage.elasticsearch
 
 import grizzled.slf4j.Logging

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala b/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
index 89b3f3b..1027930 100644
--- a/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
+++ b/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
@@ -12,7 +12,7 @@
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
-  
+
 package org.apache.hadoop.hbase.mapreduce
 
 /* Pretends to be hbase.mapreduce package in order to expose its

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
index 7a6de4e..425af1b 100644
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
+++ b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
@@ -32,8 +32,8 @@ import scala.concurrent.Future
 
 /** JDBC implementation of [[LEvents]] */
 class JDBCLEvents(
-    client: String, 
-    config: StorageClientConfig, 
+    client: String,
+    config: StorageClientConfig,
     namespace: String) extends LEvents with Logging {
   implicit private val formats = org.json4s.DefaultFormats
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/data/src/main/scala/io/prediction/data/store/LEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/LEventStore.scala b/data/src/main/scala/io/prediction/data/store/LEventStore.scala
index 1845942..be543eb 100644
--- a/data/src/main/scala/io/prediction/data/store/LEventStore.scala
+++ b/data/src/main/scala/io/prediction/data/store/LEventStore.scala
@@ -87,7 +87,7 @@ object LEventStore {
   }
 
   /** Reads events generically. If entityType or entityId is not specified, it
-    * results in table scan. 
+    * results in table scan.
     *
     * @param appName return events of this app
     * @param entityType return events of this entityType

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
index 1d35d30..b2793a0 100644
--- a/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
+++ b/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
@@ -76,7 +76,7 @@ private[prediction] object MailChimpConnector extends FormConnector {
     "data[merges][LNAME]": "API",
     "data[merges][INTERESTS]": "Group1,Group2",
     "data[ip_opt]": "10.20.10.30",
-    "data[ip_signup]": "10.20.10.30"    
+    "data[ip_signup]": "10.20.10.30"
     */
 
     // convert to ISO8601 format
@@ -209,11 +209,11 @@ private[prediction] object MailChimpConnector extends FormConnector {
     import org.json4s.JsonDSL._
 
     /*
-    "type": "upemail", 
-    "fired_at": "2009-03-26 22:15:09", 
+    "type": "upemail",
+    "fired_at": "2009-03-26 22:15:09",
     "data[list_id]": "a6b5da1054",
-    "data[new_id]": "51da8c3259", 
-    "data[new_email]": "api+new@mailchimp.com", 
+    "data[new_id]": "51da8c3259",
+    "data[new_email]": "api+new@mailchimp.com",
     "data[old_email]": "api+old@mailchimp.com"
     */
 
@@ -242,8 +242,8 @@ private[prediction] object MailChimpConnector extends FormConnector {
 
     /*
     Reason will be one of "hard" (for hard bounces) or "abuse"
-    "type": "cleaned", 
-    "fired_at": "2009-03-26 22:01:00", 
+    "type": "cleaned",
+    "fired_at": "2009-03-26 22:01:00",
     "data[list_id]": "a6b5da1054",
     "data[campaign_id]": "4fjk2ma9xd",
     "data[reason]": "hard",
@@ -273,12 +273,12 @@ private[prediction] object MailChimpConnector extends FormConnector {
     import org.json4s.JsonDSL._
 
     /*
-    "type": "campaign", 
-    "fired_at": "2009-03-26 21:31:21", 
+    "type": "campaign",
+    "fired_at": "2009-03-26 21:31:21",
     "data[id]": "5aa2102003",
-    "data[subject]": "Test Campaign Subject", 
+    "data[subject]": "Test Campaign Subject",
     "data[status]": "sent",
-    "data[reason]": "", 
+    "data[reason]": "",
     "data[list_id]": "a6b5da1054"
     */
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala b/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
index b57a24f..75e818a 100644
--- a/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
+++ b/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
@@ -21,8 +21,8 @@ import org.apache.spark.mllib.linalg.Vector
 import scala.collection.immutable.HashMap
 import scala.collection.immutable.HashSet
 
-class BinaryVectorizer(propertyMap : HashMap[(String, String), Int]) 
-extends Serializable {
+class BinaryVectorizer(propertyMap : HashMap[(String, String), Int])
+    extends Serializable {
 
   val properties: Array[(String, String)] = propertyMap.toArray.sortBy(_._2).map(_._1)
   val numFeatures = propertyMap.size
@@ -58,3 +58,4 @@ object BinaryVectorizer {
     new BinaryVectorizer(HashMap(indexed:_*))
   }
 }
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/scalastyle-config.xml
----------------------------------------------------------------------
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index d0afc1b..d24468f 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -95,4 +95,8 @@ limitations under the License.
     <check enabled="true"
            class="org.scalastyle.scalariform.PublicMethodsHaveTypeChecker"
            level="error"/>
+    <check level="error"
+           class="org.scalastyle.file.WhitespaceEndOfLineChecker"
+           enabled="true"/>
+
 </scalastyle>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala b/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
index f5d440e..b18690e 100644
--- a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
+++ b/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
@@ -162,7 +162,7 @@ object RunWorkflow extends Logging {
       // If engineParamsGenerator is specified, it overrides the evaluation.
       ca.common.engineParamsGenerator.orElse(ca.common.evaluation)
         .map(x => Seq("--engine-params-generator-class", x))
-        .getOrElse(Seq()) ++ 
+        .getOrElse(Seq()) ++
       (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
       Seq("--json-extractor", ca.common.jsonExtractor.toString)
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala b/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
index 9389d0e..924b6f0 100644
--- a/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
+++ b/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
@@ -67,7 +67,7 @@ class CommandClient(
     } getOrElse {
       appClient.get(req.id) map {
         app2 =>
-          GeneralResponse(0, 
+          GeneralResponse(0,
               s"App ID ${app2.id} already exists and maps to the app '${app2.name}'. " +
               "Aborting.")
       } getOrElse {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/7a40f737/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala b/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
index d0708f5..3d2c888 100644
--- a/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
+++ b/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
@@ -28,7 +28,7 @@ import spray.http.ContentTypes
 // see also https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS
 trait CORSSupport {
   this: HttpService =>
-  
+
   private val allowOriginHeader = `Access-Control-Allow-Origin`(AllOrigins)
   private val optionsCorsHeaders = List(
     `Access-Control-Allow-Headers`("""Origin,
@@ -42,18 +42,18 @@ trait CORSSupport {
                                       |User-Agent""".stripMargin.replace("\n", " ")),
     `Access-Control-Max-Age`(1728000)
   )
- 
-  def cors[T]: Directive0 = mapRequestContext { ctx => 
+
+  def cors[T]: Directive0 = mapRequestContext { ctx =>
     ctx.withRouteResponseHandling {
       // OPTION request for a resource that responds to other methods
-      case Rejected(x) if (ctx.request.method.equals(HttpMethods.OPTIONS) && 
+      case Rejected(x) if (ctx.request.method.equals(HttpMethods.OPTIONS) &&
           x.exists(_.isInstanceOf[MethodRejection])) => {
-        val allowedMethods: List[HttpMethod] = x.collect { 
+        val allowedMethods: List[HttpMethod] = x.collect {
           case rejection: MethodRejection => rejection.supported
         }
         ctx.complete {
           HttpResponse().withHeaders(
-            `Access-Control-Allow-Methods`(HttpMethods.OPTIONS, allowedMethods :_*) :: 
+            `Access-Control-Allow-Methods`(HttpMethods.OPTIONS, allowedMethods :_*) ::
             allowOriginHeader ::
             optionsCorsHeaders
           )
@@ -63,11 +63,11 @@ trait CORSSupport {
       allowOriginHeader :: headers
     }
   }
-  
+
   override def timeoutRoute: StandardRoute = complete {
     HttpResponse(
       StatusCodes.InternalServerError,
-      HttpEntity(ContentTypes.`text/plain(UTF-8)`, 
+      HttpEntity(ContentTypes.`text/plain(UTF-8)`,
           "The server was not able to produce a timely response to your request."),
       List(allowOriginHeader)
     )

