predictionio-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From shimam...@apache.org
Subject [10/11] incubator-predictionio git commit: [PIO-97] Fixes examples of the official templates for v0.11.0-incubating.
Date Mon, 10 Jul 2017 04:10:13 GMT
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/CompleteEvaluation.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/CompleteEvaluation.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/CompleteEvaluation.scala
new file mode 100644
index 0000000..ea997e2
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/CompleteEvaluation.scala
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.controller.MetricEvaluator
+
+object CompleteEvaluation extends Evaluation {
+  engineEvaluator = (
+    ClassificationEngine(),
+    MetricEvaluator(
+      metric = Accuracy(),
+      otherMetrics = Seq(Precision(0.0), Precision(1.0), Precision(2.0)),
+      outputPath = "best.json"))
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/DataSource.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/DataSource.scala
new file mode 100644
index 0000000..ef28488
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/DataSource.scala
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.store.PEventStore
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.regression.LabeledPoint
+import org.apache.spark.mllib.linalg.Vectors
+
+import grizzled.slf4j.Logger
+
+case class DataSourceParams(
+  appName: String,
+  evalK: Option[Int]  // define the k-fold parameter.
+) extends Params
+
+class DataSource(val dsp: DataSourceParams)
+  extends PDataSource[TrainingData,
+      EmptyEvaluationInfo, Query, ActualResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  override
+  def readTraining(sc: SparkContext): TrainingData = {
+
+    val labeledPoints: RDD[LabeledPoint] = PEventStore.aggregateProperties(
+      appName = dsp.appName,
+      entityType = "item", // MODIFIED
+      // only keep entities with these required properties defined
+      required = Some(List( // MODIFIED
+        "featureA", "featureB", "featureC", "featureD", "label")))(sc)
+      // aggregateProperties() returns RDD pair of
+      // entity ID and its aggregated properties
+      .map { case (entityId, properties) =>
+        try {
+          // MODIFIED
+          LabeledPoint(properties.get[Double]("label"),
+            Vectors.dense(Array(
+              properties.get[Double]("featureA"),
+              properties.get[Double]("featureB"),
+              properties.get[Double]("featureC"),
+              properties.get[Double]("featureD")
+            ))
+          )
+        } catch {
+          case e: Exception => {
+            logger.error(s"Failed to get properties ${properties} of" +
+              s" ${entityId}. Exception: ${e}.")
+            throw e
+          }
+        }
+      }.cache()
+
+    new TrainingData(labeledPoints)
+  }
+
+  override
+  def readEval(sc: SparkContext)
+  : Seq[(TrainingData, EmptyEvaluationInfo, RDD[(Query, ActualResult)])] = {
+    require(dsp.evalK.nonEmpty, "DataSourceParams.evalK must not be None")
+
+    // The following code reads the data from data store. It is equivalent to
+    // the readTraining method. We copy-and-paste the exact code here for
+    // illustration purposes; a recommended approach is to factor out this logic
+    // into a helper function and have both readTraining and readEval call the
+    // helper.
+    val labeledPoints: RDD[LabeledPoint] = PEventStore.aggregateProperties(
+      appName = dsp.appName,
+      entityType = "item", // MODIFIED
+      // only keep entities with these required properties defined
+      required = Some(List( // MODIFIED
+        "featureA", "featureB", "featureC", "featureD", "label")))(sc)
+      // aggregateProperties() returns RDD pair of
+      // entity ID and its aggregated properties
+      .map { case (entityId, properties) =>
+        try {
+          // MODIFIED
+          LabeledPoint(properties.get[Double]("label"),
+            Vectors.dense(Array(
+              properties.get[Double]("featureA"),
+              properties.get[Double]("featureB"),
+              properties.get[Double]("featureC"),
+              properties.get[Double]("featureD")
+            ))
+          )
+        } catch {
+          case e: Exception => {
+            logger.error(s"Failed to get properties ${properties} of" +
+              s" ${entityId}. Exception: ${e}.")
+            throw e
+          }
+        }
+      }.cache()
+    // End of reading from data store
+
+    // K-fold splitting
+    val evalK = dsp.evalK.get
+    val indexedPoints: RDD[(LabeledPoint, Long)] = labeledPoints.zipWithIndex()
+
+    (0 until evalK).map { idx =>
+      val trainingPoints = indexedPoints.filter(_._2 % evalK != idx).map(_._1)
+      val testingPoints = indexedPoints.filter(_._2 % evalK == idx).map(_._1)
+
+      (
+        new TrainingData(trainingPoints),
+        new EmptyEvaluationInfo(),
+        testingPoints.map {
+          // MODIFIED
+          p => (new Query(p.features(0), p.features(1), p.features(2), p.features(3)), new ActualResult(p.label))
+        }
+      )
+    }
+  }
+}
+
+class TrainingData(
+  val labeledPoints: RDD[LabeledPoint]
+) extends Serializable

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Engine.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Engine.scala
new file mode 100644
index 0000000..ddbf7dd
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Engine.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.EngineFactory
+import org.apache.predictionio.controller.Engine
+
+// MODIFIED
+class Query(
+  val featureA : Double,
+  val featureB : Double,
+  val featureC : Double,
+  val featureD : Double
+) extends Serializable
+
+class PredictedResult(
+  val label: Double
+) extends Serializable
+
+class ActualResult(
+  val label: Double
+) extends Serializable
+
+object ClassificationEngine extends EngineFactory {
+  def apply() = {
+    new Engine(
+      classOf[DataSource],
+      classOf[Preparator],
+      Map("naive" -> classOf[NaiveBayesAlgorithm]),
+      classOf[Serving])
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Evaluation.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Evaluation.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Evaluation.scala
new file mode 100644
index 0000000..3bc3399
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Evaluation.scala
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.AverageMetric
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.EngineParamsGenerator
+import org.apache.predictionio.controller.Evaluation
+
+case class Accuracy()
+  extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
+  def calculate(query: Query, predicted: PredictedResult, actual: ActualResult)
+  : Double = (if (predicted.label == actual.label) 1.0 else 0.0)
+}
+
+object AccuracyEvaluation extends Evaluation {
+  // Define Engine and Metric used in Evaluation
+  engineMetric = (ClassificationEngine(), new Accuracy())
+}
+
+object EngineParamsList extends EngineParamsGenerator {
+  // Define list of EngineParams used in Evaluation
+
+  // First, we define the base engine params. It specifies the appName from
+  // which the data is read, and an evalK parameter is used to define the
+  // cross-validation.
+  private[this] val baseEP = EngineParams(
+    dataSourceParams = DataSourceParams(appName = "INVALID_APP_NAME", evalK = Some(5)))
+
+  // Second, we specify the engine params list by explicitly listing all
+  // algorithm parameters. In this case, we evaluate 3 engine params, each with
+  // a different algorithm params value.
+  engineParamsList = Seq(
+    baseEP.copy(algorithmParamsList = Seq(("naive", AlgorithmParams(10.0)))),
+    baseEP.copy(algorithmParamsList = Seq(("naive", AlgorithmParams(100.0)))),
+    baseEP.copy(algorithmParamsList = Seq(("naive", AlgorithmParams(1000.0)))))
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala
new file mode 100644
index 0000000..f12d9a3
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+
+import org.apache.spark.mllib.classification.NaiveBayes
+import org.apache.spark.mllib.classification.NaiveBayesModel
+import org.apache.spark.mllib.linalg.Vectors
+import org.apache.spark.SparkContext
+
+import grizzled.slf4j.Logger
+
+case class AlgorithmParams(
+  lambda: Double
+) extends Params
+
+// extends P2LAlgorithm because MLlib's NaiveBayesModel doesn't contain an RDD.
+class NaiveBayesAlgorithm(val ap: AlgorithmParams)
+  extends P2LAlgorithm[PreparedData, NaiveBayesModel, Query, PredictedResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  def train(sc: SparkContext, data: PreparedData): NaiveBayesModel = {
+    // MLlib NaiveBayes cannot handle empty training data.
+    require(data.labeledPoints.take(1).nonEmpty,
+      s"RDD[labeledPoints] in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preparator generates PreparedData correctly.")
+
+    NaiveBayes.train(data.labeledPoints, ap.lambda)
+  }
+
+  def predict(model: NaiveBayesModel, query: Query): PredictedResult = {
+    val label = model.predict(Vectors.dense(
+      // MODIFIED
+      Array(query.featureA, query.featureB, query.featureC, query.featureD)
+    ))
+    new PredictedResult(label)
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala
new file mode 100644
index 0000000..cd91a1e
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.OptionAverageMetric
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.Evaluation
+
+case class Precision(label: Double)
+  extends OptionAverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
+  override def header: String = s"Precision(label = $label)"
+
+  def calculate(query: Query, predicted: PredictedResult, actual: ActualResult)
+  : Option[Double] = {
+    if (predicted.label == label) {
+      if (predicted.label == actual.label) {
+        Some(1.0)  // True positive
+      } else {
+        Some(0.0)  // False positive
+      }
+    } else {
+      None  // Unrelated case for calculating precision
+    }
+  }
+}
+
+object PrecisionEvaluation extends Evaluation {
+  engineMetric = (ClassificationEngine(), new Precision(label = 1.0))
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala
new file mode 100644
index 0000000..20d8f8c
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.PPreparator
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.regression.LabeledPoint
+
+class PreparedData(
+  val labeledPoints: RDD[LabeledPoint]
+) extends Serializable
+
+class Preparator extends PPreparator[TrainingData, PreparedData] {
+
+  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
+    new PreparedData(trainingData.labeledPoints)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Serving.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Serving.scala
new file mode 100644
index 0000000..706dfe2
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Serving.scala
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.classification
+
+import org.apache.predictionio.controller.LServing
+
+class Serving extends LServing[Query, PredictedResult] {
+
+  override
+  def serve(query: Query,
+    predictedResults: Seq[PredictedResult]): PredictedResult = {
+    predictedResults.head
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-classification/reading-custom-properties/template.json
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/reading-custom-properties/template.json b/examples/scala-parallel-classification/reading-custom-properties/template.json
new file mode 100644
index 0000000..d076ec5
--- /dev/null
+++ b/examples/scala-parallel-classification/reading-custom-properties/template.json
@@ -0,0 +1 @@
+{"pio": {"version": { "min": "0.10.0-incubating" }}}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/README.md
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/README.md b/examples/scala-parallel-ecommercerecommendation/README.md
new file mode 100644
index 0000000..c750458
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/README.md
@@ -0,0 +1,20 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+This is based on E-Commerce Recommendation Template v0.11.0-incubating.
+
+Please refer to http://predictionio.incubator.apache.org/templates/ecommercerecommendation/how-to/
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/.gitignore
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/.gitignore b/examples/scala-parallel-ecommercerecommendation/adjust-score/.gitignore
new file mode 100644
index 0000000..57841c6
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/.gitignore
@@ -0,0 +1,4 @@
+manifest.json
+target/
+pio.log
+/pio.sbt

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/build.sbt b/examples/scala-parallel-ecommercerecommendation/adjust-score/build.sbt
new file mode 100644
index 0000000..85b21d6
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/build.sbt
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+name := "template-scala-parallel-ecommercerecommendation"
+
+organization := "org.apache.predictionio"
+scalaVersion := "2.11.8"
+libraryDependencies ++= Seq(
+  "org.apache.predictionio" %% "apache-predictionio-core" % "0.11.0-incubating" % "provided",
+  "org.apache.spark"        %% "spark-mllib"              % "2.1.1" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/data/import_eventserver.py
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/data/import_eventserver.py b/examples/scala-parallel-ecommercerecommendation/adjust-score/data/import_eventserver.py
new file mode 100644
index 0000000..eee2672
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/data/import_eventserver.py
@@ -0,0 +1,101 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Import sample data for E-Commerce Recommendation Engine Template
+"""
+
+import predictionio
+import argparse
+import random
+
+SEED = 3
+
+def import_events(client):
+  random.seed(SEED)
+  count = 0
+  print(client.get_status())
+  print("Importing data...")
+
+  # generate 10 users, with user ids u1,u2,....,u10
+  user_ids = ["u%s" % i for i in range(1, 11)]
+  for user_id in user_ids:
+    print("Set user", user_id)
+    client.create_event(
+      event="$set",
+      entity_type="user",
+      entity_id=user_id
+    )
+    count += 1
+
+  # generate 50 items, with item ids i1,i2,....,i50
+  # random assign 1 to 4 categories among c1-c6 to items
+  categories = ["c%s" % i for i in range(1, 7)]
+  item_ids = ["i%s" % i for i in range(1, 51)]
+  for item_id in item_ids:
+    print("Set item", item_id)
+    client.create_event(
+      event="$set",
+      entity_type="item",
+      entity_id=item_id,
+      properties={
+        "categories" : random.sample(categories, random.randint(1, 4))
+      }
+    )
+    count += 1
+
+  # each user randomly viewed 10 items
+  for user_id in user_ids:
+    for viewed_item in random.sample(item_ids, 10):
+      print("User", user_id ,"views item", viewed_item)
+      client.create_event(
+        event="view",
+        entity_type="user",
+        entity_id=user_id,
+        target_entity_type="item",
+        target_entity_id=viewed_item
+      )
+      count += 1
+      # randomly buy some of the viewed items
+      if random.choice([True, False]):
+        print("User", user_id ,"buys item", viewed_item)
+        client.create_event(
+          event="buy",
+          entity_type="user",
+          entity_id=user_id,
+          target_entity_type="item",
+          target_entity_id=viewed_item
+        )
+        count += 1
+
+  print("%s events are imported." % count)
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+    description="Import sample data for e-commerce recommendation engine")
+  parser.add_argument('--access_key', default='invald_access_key')
+  parser.add_argument('--url', default="http://localhost:7070")
+
+  args = parser.parse_args()
+  print(args)
+
+  client = predictionio.EventClient(
+    access_key=args.access_key,
+    url=args.url,
+    threads=5,
+    qsize=500)
+  import_events(client)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/data/send_query.py
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/data/send_query.py b/examples/scala-parallel-ecommercerecommendation/adjust-score/data/send_query.py
new file mode 100644
index 0000000..c3a5dd2
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/data/send_query.py
@@ -0,0 +1,24 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Send sample query to prediction engine
+"""
+
+import predictionio
+engine_client = predictionio.EngineClient(url="http://localhost:8000")
+print(engine_client.send_query({"user": "u1", "num": 4}))

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/engine.json
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/engine.json b/examples/scala-parallel-ecommercerecommendation/adjust-score/engine.json
new file mode 100644
index 0000000..71b1630
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/engine.json
@@ -0,0 +1,25 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.apache.predictionio.examples.ecommercerecommendation.ECommerceRecommendationEngine",
+  "datasource": {
+    "params" : {
+      "appName": "MyApp1"
+    }
+  },
+  "algorithms": [
+    {
+      "name": "ecomm",
+      "params": {
+        "appName": "MyApp1",
+        "unseenOnly": true,
+        "seenEvents": ["buy", "view"],
+        "similarEvents": ["view"],
+        "rank": 10,
+        "numIterations" : 20,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/project/assembly.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/project/assembly.sbt b/examples/scala-parallel-ecommercerecommendation/adjust-score/project/assembly.sbt
new file mode 100644
index 0000000..e17409e
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/project/assembly.sbt
@@ -0,0 +1 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.4")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/project/build.properties
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/project/build.properties b/examples/scala-parallel-ecommercerecommendation/adjust-score/project/build.properties
new file mode 100644
index 0000000..64317fd
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.13.15

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/DataSource.scala b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/DataSource.scala
new file mode 100644
index 0000000..b40ace9
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/DataSource.scala
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.ecommercerecommendation
+
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.store.PEventStore
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+import grizzled.slf4j.Logger
+
+case class DataSourceParams(appName: String) extends Params
+
+class DataSource(val dsp: DataSourceParams)
+  extends PDataSource[TrainingData,
+      EmptyEvaluationInfo, Query, EmptyActualResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  override
+  def readTraining(sc: SparkContext): TrainingData = {
+
+    // create a RDD of (entityID, User)
+    val usersRDD: RDD[(String, User)] = PEventStore.aggregateProperties(
+      appName = dsp.appName,
+      entityType = "user"
+    )(sc).map { case (entityId, properties) =>
+      val user = try {
+        User()
+      } catch {
+        case e: Exception => {
+          logger.error(s"Failed to get properties ${properties} of" +
+            s" user ${entityId}. Exception: ${e}.")
+          throw e
+        }
+      }
+      (entityId, user)
+    }.cache()
+
+    // create a RDD of (entityID, Item)
+    val itemsRDD: RDD[(String, Item)] = PEventStore.aggregateProperties(
+      appName = dsp.appName,
+      entityType = "item"
+    )(sc).map { case (entityId, properties) =>
+      val item = try {
+        // Assume categories is optional property of item.
+        Item(categories = properties.getOpt[List[String]]("categories"))
+      } catch {
+        case e: Exception => {
+          logger.error(s"Failed to get properties ${properties} of" +
+            s" item ${entityId}. Exception: ${e}.")
+          throw e
+        }
+      }
+      (entityId, item)
+    }.cache()
+
+    val eventsRDD: RDD[Event] = PEventStore.find(
+      appName = dsp.appName,
+      entityType = Some("user"),
+      eventNames = Some(List("view", "buy")),
+      // targetEntityType is optional field of an event.
+      targetEntityType = Some(Some("item")))(sc)
+      .cache()
+
+    val viewEventsRDD: RDD[ViewEvent] = eventsRDD
+      .filter { event => event.event == "view" }
+      .map { event =>
+        try {
+          ViewEvent(
+            user = event.entityId,
+            item = event.targetEntityId.get,
+            t = event.eventTime.getMillis
+          )
+        } catch {
+          case e: Exception =>
+            logger.error(s"Cannot convert ${event} to ViewEvent." +
+              s" Exception: ${e}.")
+            throw e
+        }
+      }
+
+    val buyEventsRDD: RDD[BuyEvent] = eventsRDD
+      .filter { event => event.event == "buy" }
+      .map { event =>
+        try {
+          BuyEvent(
+            user = event.entityId,
+            item = event.targetEntityId.get,
+            t = event.eventTime.getMillis
+          )
+        } catch {
+          case e: Exception =>
+            logger.error(s"Cannot convert ${event} to BuyEvent." +
+              s" Exception: ${e}.")
+            throw e
+        }
+      }
+
+    new TrainingData(
+      users = usersRDD,
+      items = itemsRDD,
+      viewEvents = viewEventsRDD,
+      buyEvents = buyEventsRDD
+    )
+  }
+}
+
+case class User()
+
+case class Item(categories: Option[List[String]])
+
+case class ViewEvent(user: String, item: String, t: Long)
+
+case class BuyEvent(user: String, item: String, t: Long)
+
+class TrainingData(
+  val users: RDD[(String, User)],
+  val items: RDD[(String, Item)],
+  val viewEvents: RDD[ViewEvent],
+  val buyEvents: RDD[BuyEvent]
+) extends Serializable {
+  // Log-friendly summary. NOTE: count()/take() trigger Spark actions.
+  // Leading spaces separate the segments (same style as ECommModel.toString);
+  // without them the output ran together, e.g. "...)]items: [...".
+  override def toString = {
+    s"users: [${users.count()} (${users.take(2).toList}...)]" +
+    s" items: [${items.count()} (${items.take(2).toList}...)]" +
+    s" viewEvents: [${viewEvents.count()}] (${viewEvents.take(2).toList}...)" +
+    s" buyEvents: [${buyEvents.count()}] (${buyEvents.take(2).toList}...)"
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala
new file mode 100644
index 0000000..ef49dc2
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala
@@ -0,0 +1,647 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.ecommercerecommendation
+
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.store.LEventStore
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.recommendation.ALS
+import org.apache.spark.mllib.recommendation.{Rating => MLlibRating}
+import org.apache.spark.rdd.RDD
+
+import grizzled.slf4j.Logger
+
+import scala.collection.mutable.PriorityQueue
+import scala.concurrent.duration.Duration
+import scala.concurrent.ExecutionContext.Implicits.global
+
+case class ECommAlgorithmParams(
+  appName: String,
+  unseenOnly: Boolean,
+  seenEvents: List[String],
+  similarEvents: List[String],
+  rank: Int,
+  numIterations: Int,
+  lambda: Double,
+  seed: Option[Long]
+) extends Params
+
+
+case class ProductModel(
+  item: Item,
+  features: Option[Array[Double]], // features by ALS
+  count: Int // popular count for default score
+)
+
+// ADDED
+case class WeightGroup(
+  items: Set[String],
+  weight: Double
+)
+
+class ECommModel(
+  val rank: Int,
+  val userFeatures: Map[Int, Array[Double]],
+  val productModels: Map[Int, ProductModel],
+  val userStringIntMap: BiMap[String, Int],
+  val itemStringIntMap: BiMap[String, Int]
+) extends Serializable {
+
+  @transient lazy val itemIntStringMap = itemStringIntMap.inverse
+
+  override def toString = {
+    s" rank: ${rank}" +
+    s" userFeatures: [${userFeatures.size}]" +
+    s"(${userFeatures.take(2).toList}...)" +
+    s" productModels: [${productModels.size}]" +
+    s"(${productModels.take(2).toList}...)" +
+    s" userStringIntMap: [${userStringIntMap.size}]" +
+    s"(${userStringIntMap.take(2).toString}...)]" +
+    s" itemStringIntMap: [${itemStringIntMap.size}]" +
+    s"(${itemStringIntMap.take(2).toString}...)]"
+  }
+}
+
+class ECommAlgorithm(val ap: ECommAlgorithmParams)
+  extends P2LAlgorithm[PreparedData, ECommModel, Query, PredictedResult] {
+
+  @transient lazy val logger = Logger[this.type]
+
+  def train(sc: SparkContext, data: PreparedData): ECommModel = {
+    require(!data.viewEvents.take(1).isEmpty,
+      s"viewEvents in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preparator generates PreparedData correctly.")
+    require(!data.users.take(1).isEmpty,
+      s"users in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preparator generates PreparedData correctly.")
+    require(!data.items.take(1).isEmpty,
+      s"items in PreparedData cannot be empty." +
+      " Please check if DataSource generates TrainingData" +
+      " and Preparator generates PreparedData correctly.")
+    // create User and item's String ID to integer index BiMap
+    val userStringIntMap = BiMap.stringInt(data.users.keys)
+    val itemStringIntMap = BiMap.stringInt(data.items.keys)
+
+    val mllibRatings: RDD[MLlibRating] = genMLlibRating(
+      userStringIntMap = userStringIntMap,
+      itemStringIntMap = itemStringIntMap,
+      data = data
+    )
+
+    // MLLib ALS cannot handle empty training data.
+    require(!mllibRatings.take(1).isEmpty,
+      s"mllibRatings cannot be empty." +
+      " Please check if your events contain valid user and item ID.")
+
+    // seed for MLlib ALS
+    val seed = ap.seed.getOrElse(System.nanoTime)
+
+    // use ALS to train feature vectors
+    val m = ALS.trainImplicit(
+      ratings = mllibRatings,
+      rank = ap.rank,
+      iterations = ap.numIterations,
+      lambda = ap.lambda,
+      blocks = -1,
+      alpha = 1.0,
+      seed = seed)
+
+    val userFeatures = m.userFeatures.collectAsMap.toMap
+
+    // convert ID to Int index
+    val items = data.items.map { case (id, item) =>
+      (itemStringIntMap(id), item)
+    }
+
+    // join item with the trained productFeatures
+    val productFeatures: Map[Int, (Item, Option[Array[Double]])] =
+      items.leftOuterJoin(m.productFeatures).collectAsMap.toMap
+
+    val popularCount = trainDefault(
+      userStringIntMap = userStringIntMap,
+      itemStringIntMap = itemStringIntMap,
+      data = data
+    )
+
+    val productModels: Map[Int, ProductModel] = productFeatures
+      .map { case (index, (item, features)) =>
+        val pm = ProductModel(
+          item = item,
+          features = features,
+          // NOTE: use getOrElse because popularCount may not contain all items.
+          count = popularCount.getOrElse(index, 0)
+        )
+        (index, pm)
+      }
+
+    new ECommModel(
+      rank = m.rank,
+      userFeatures = userFeatures,
+      productModels = productModels,
+      userStringIntMap = userStringIntMap,
+      itemStringIntMap = itemStringIntMap
+    )
+  }
+
+  /** Generate MLlibRating from PreparedData.
+    * You may customize this function if use different events or different aggregation method
+    */
+  def genMLlibRating(
+    userStringIntMap: BiMap[String, Int],
+    itemStringIntMap: BiMap[String, Int],
+    data: PreparedData): RDD[MLlibRating] = {
+
+    val mllibRatings = data.viewEvents
+      .map { r =>
+        // Convert user and item String IDs to Int index for MLlib
+        val uindex = userStringIntMap.getOrElse(r.user, -1)
+        val iindex = itemStringIntMap.getOrElse(r.item, -1)
+
+        if (uindex == -1)
+          logger.info(s"Couldn't convert nonexistent user ID ${r.user}"
+            + " to Int index.")
+
+        if (iindex == -1)
+          logger.info(s"Couldn't convert nonexistent item ID ${r.item}"
+            + " to Int index.")
+
+        ((uindex, iindex), 1)
+      }
+      .filter { case ((u, i), v) =>
+        // keep events with valid user and item index
+        (u != -1) && (i != -1)
+      }
+      .reduceByKey(_ + _) // aggregate all view events of same user-item pair
+      .map { case ((u, i), v) =>
+        // MLlibRating requires integer index for user and item
+        MLlibRating(u, i, v)
+      }
+      .cache()
+
+    mllibRatings
+  }
+
+  /** Train default model.
+    * You may customize this function if use different events or
+    * need different ways to count "popular" score or return default score for item.
+    */
+  def trainDefault(
+    userStringIntMap: BiMap[String, Int],
+    itemStringIntMap: BiMap[String, Int],
+    data: PreparedData): Map[Int, Int] = {
+    // count number of buys
+    // (item index, count)
+    val buyCountsRDD: RDD[(Int, Int)] = data.buyEvents
+      .map { r =>
+        // Convert user and item String IDs to Int index
+        val uindex = userStringIntMap.getOrElse(r.user, -1)
+        val iindex = itemStringIntMap.getOrElse(r.item, -1)
+
+        if (uindex == -1)
+          logger.info(s"Couldn't convert nonexistent user ID ${r.user}"
+            + " to Int index.")
+
+        if (iindex == -1)
+          logger.info(s"Couldn't convert nonexistent item ID ${r.item}"
+            + " to Int index.")
+
+        (uindex, iindex, 1)
+      }
+      .filter { case (u, i, v) =>
+        // keep events with valid user and item index
+        (u != -1) && (i != -1)
+      }
+      .map { case (u, i, v) => (i, 1) } // key is item
+      .reduceByKey{ case (a, b) => a + b } // count number of items occurrence
+
+    buyCountsRDD.collectAsMap.toMap
+  }
+
+  def predict(model: ECommModel, query: Query): PredictedResult = {
+
+    val userFeatures = model.userFeatures
+    val productModels = model.productModels
+
+    // convert whiteList's string ID to integer index
+    val whiteList: Option[Set[Int]] = query.whiteList.map( set =>
+      set.flatMap(model.itemStringIntMap.get(_))
+    )
+
+    val finalBlackList: Set[Int] = genBlackList(query = query)
+      // convert seen Items list from String ID to integer index
+      .flatMap(x => model.itemStringIntMap.get(x))
+
+    // ADDED: per-item score multipliers from the "weightedItems" constraint;
+    // items not in any WeightGroup default to weight 1.0 (no adjustment)
+    val weights: Map[Int, Double] = (for {
+      group <- weightedItems
+      item <- group.items
+      index <- model.itemStringIntMap.get(item)
+    } yield (index, group.weight))
+      .toMap
+      .withDefaultValue(1.0)
+
+    val userFeature: Option[Array[Double]] =
+      model.userStringIntMap.get(query.user).flatMap { userIndex =>
+        userFeatures.get(userIndex)
+      }
+
+    val topScores: Array[(Int, Double)] = if (userFeature.isDefined) {
+      // the user has feature vector
+      predictKnownUser(
+        userFeature = userFeature.get,
+        productModels = productModels,
+        query = query,
+        whiteList = whiteList,
+        blackList = finalBlackList,
+        weights = weights // ADDED
+      )
+    } else {
+      // the user doesn't have feature vector.
+      // For example, new user is created after model is trained.
+      logger.info(s"No userFeature found for user ${query.user}.")
+
+      // check if the user has recent events on some items
+      val recentItems: Set[String] = getRecentItems(query)
+      val recentList: Set[Int] = recentItems.flatMap (x =>
+        model.itemStringIntMap.get(x))
+
+      val recentFeatures: Vector[Array[Double]] = recentList.toVector
+        // productModels may not contain the requested item
+        .map { i =>
+          productModels.get(i).flatMap { pm => pm.features }
+        }.flatten
+
+      if (recentFeatures.isEmpty) {
+        logger.info(s"No features vector for recent items ${recentItems}.")
+        predictDefault(
+          productModels = productModels,
+          query = query,
+          whiteList = whiteList,
+          blackList = finalBlackList,
+          weights = weights // ADDED
+        )
+      } else {
+        predictSimilar(
+          recentFeatures = recentFeatures,
+          productModels = productModels,
+          query = query,
+          whiteList = whiteList,
+          blackList = finalBlackList,
+          weights = weights // ADDED
+        )
+      }
+    }
+
+    val itemScores = topScores.map { case (i, s) =>
+      new ItemScore(
+        // convert item int index back to string ID
+        item = model.itemIntStringMap(i),
+        score = s
+      )
+    }
+
+    new PredictedResult(itemScores)
+  }
+
+  /** Generate final blackList based on other constraints */
+  def genBlackList(query: Query): Set[String] = {
+    // if unseenOnly is True, get all seen items
+    val seenItems: Set[String] = if (ap.unseenOnly) {
+
+      // get all user item events which are considered as "seen" events
+      val seenEvents: Iterator[Event] = try {
+        LEventStore.findByEntity(
+          appName = ap.appName,
+          entityType = "user",
+          entityId = query.user,
+          eventNames = Some(ap.seenEvents),
+          targetEntityType = Some(Some("item")),
+          // set time limit to avoid super long DB access
+          timeout = Duration(200, "millis")
+        )
+      } catch {
+        case e: scala.concurrent.TimeoutException =>
+          logger.error(s"Timeout when read seen events." +
+            s" Empty list is used. ${e}")
+          Iterator[Event]()
+        case e: Exception =>
+          logger.error(s"Error when read seen events: ${e}")
+          throw e
+      }
+
+      seenEvents.map { event =>
+        try {
+          event.targetEntityId.get
+        } catch {
+          case e: Exception => {
+            logger.error(s"Can't get targetEntityId of event ${event}.")
+            throw e
+          }
+        }
+      }.toSet
+    } else {
+      Set[String]()
+    }
+
+    // get the latest constraint unavailableItems $set event
+    val unavailableItems: Set[String] = try {
+      val constr = LEventStore.findByEntity(
+        appName = ap.appName,
+        entityType = "constraint",
+        entityId = "unavailableItems",
+        eventNames = Some(Seq("$set")),
+        limit = Some(1),
+        latest = true,
+        timeout = Duration(200, "millis")
+      )
+      if (constr.hasNext) {
+        constr.next.properties.get[Set[String]]("items")
+      } else {
+        Set[String]()
+      }
+    } catch {
+      case e: scala.concurrent.TimeoutException =>
+        logger.error(s"Timeout when read set unavailableItems event." +
+          s" Empty list is used. ${e}")
+        Set[String]()
+      case e: Exception =>
+        logger.error(s"Error when read set unavailableItems event: ${e}")
+        throw e
+    }
+
+    // combine query's blackList, seenItems and unavailableItems
+    // into final blackList.
+    query.blackList.getOrElse(Set[String]()) ++ seenItems ++ unavailableItems
+  }
+
+  // ADDED
+  /** Get the latest constraint weightedItems */
+  def weightedItems: Seq[WeightGroup] = {
+    try {
+      val constr = LEventStore.findByEntity(
+        appName = ap.appName,
+        entityType = "constraint",
+        entityId = "weightedItems",
+        eventNames = Some(Seq("$set")),
+        limit = Some(1),
+        latest = true,
+        timeout = Duration(200, "millis")
+      )
+      if (constr.hasNext) {
+        constr.next.properties.get[Seq[WeightGroup]]("weights")
+      } else {
+        Nil
+      }
+    } catch {
+      case e: scala.concurrent.TimeoutException =>
+        logger.error(s"Timeout when read set weightedItems event." +
+          s" Empty list is used. ${e}")
+        Nil
+      case e: Exception =>
+        logger.error(s"Error when read set weightedItems event: ${e}")
+        throw e
+    }
+  }
+
+  /** Get recent events of the user on items for recommending similar items */
+  def getRecentItems(query: Query): Set[String] = {
+    // get latest 10 user view item events
+    val recentEvents = try {
+      LEventStore.findByEntity(
+        appName = ap.appName,
+        // entityType and entityId is specified for fast lookup
+        entityType = "user",
+        entityId = query.user,
+        eventNames = Some(ap.similarEvents),
+        targetEntityType = Some(Some("item")),
+        limit = Some(10),
+        latest = true,
+        // set time limit to avoid super long DB access
+        timeout = Duration(200, "millis")
+      )
+    } catch {
+      case e: scala.concurrent.TimeoutException =>
+        logger.error(s"Timeout when read recent events." +
+          s" Empty list is used. ${e}")
+        Iterator[Event]()
+      case e: Exception =>
+        logger.error(s"Error when read recent events: ${e}")
+        throw e
+    }
+
+    val recentItems: Set[String] = recentEvents.map { event =>
+      try {
+        event.targetEntityId.get
+      } catch {
+        case e: Exception => {
+          // FIXED: the "s" interpolator was missing, so the literal text
+          // "${event}" was logged instead of the offending event.
+          logger.error(s"Can't get targetEntityId of event ${event}.")
+          throw e
+        }
+      }
+    }.toSet
+
+    recentItems
+  }
+
+  /** Prediction for user with known feature vector */
+  def predictKnownUser(
+    userFeature: Array[Double],
+    productModels: Map[Int, ProductModel],
+    query: Query,
+    whiteList: Option[Set[Int]],
+    blackList: Set[Int],
+    weights: Map[Int, Double] // ADDED
+  ): Array[(Int, Double)] = {
+    val indexScores: Map[Int, Double] = productModels.par // convert to parallel collection
+      .filter { case (i, pm) =>
+        pm.features.isDefined &&
+        isCandidateItem(
+          i = i,
+          item = pm.item,
+          categories = query.categories,
+          whiteList = whiteList,
+          blackList = blackList
+        )
+      }
+      .map { case (i, pm) =>
+        // NOTE: features must be defined, so can call .get
+        val s = dotProduct(userFeature, pm.features.get)
+        // may customize here to further adjust score
+        // ADDED
+        val adjustedScore = s * weights(i)
+        (i, adjustedScore)
+      }
+      .filter(_._2 > 0) // only keep items with score > 0
+      .seq // convert back to sequential collection
+
+    val ord = Ordering.by[(Int, Double), Double](_._2).reverse
+    val topScores = getTopN(indexScores, query.num)(ord).toArray
+
+    topScores
+  }
+
+  /** Default prediction when know nothing about the user */
+  def predictDefault(
+    productModels: Map[Int, ProductModel],
+    query: Query,
+    whiteList: Option[Set[Int]],
+    blackList: Set[Int],
+    weights: Map[Int, Double] // ADDED
+  ): Array[(Int, Double)] = {
+    val indexScores: Map[Int, Double] = productModels.par // convert to parallel collection
+      .filter { case (i, pm) =>
+        isCandidateItem(
+          i = i,
+          item = pm.item,
+          categories = query.categories,
+          whiteList = whiteList,
+          blackList = blackList
+        )
+      }
+      .map { case (i, pm) =>
+        // may customize here to further adjust score
+        // ADDED
+        val s = pm.count.toDouble
+        val adjustedScore = s * weights(i)
+        (i, adjustedScore)
+      }
+      .seq // convert back to sequential collection
+
+    val ord = Ordering.by[(Int, Double), Double](_._2).reverse
+    val topScores = getTopN(indexScores, query.num)(ord).toArray
+
+    topScores
+  }
+
+  /** Return top similar items based on items user recently has action on */
+  def predictSimilar(
+    recentFeatures: Vector[Array[Double]],
+    productModels: Map[Int, ProductModel],
+    query: Query,
+    whiteList: Option[Set[Int]],
+    blackList: Set[Int],
+    weights: Map[Int, Double] // ADDED
+  ): Array[(Int, Double)] = {
+    val indexScores: Map[Int, Double] = productModels.par // convert to parallel collection
+      .filter { case (i, pm) =>
+        pm.features.isDefined &&
+        isCandidateItem(
+          i = i,
+          item = pm.item,
+          categories = query.categories,
+          whiteList = whiteList,
+          blackList = blackList
+        )
+      }
+      .map { case (i, pm) =>
+        val s = recentFeatures.map{ rf =>
+          // pm.features must be defined because of filter logic above
+          cosine(rf, pm.features.get)
+        }.reduce(_ + _)
+        // may customize here to further adjust score
+        // ADDED
+        val adjustedScore = s * weights(i)
+        (i, adjustedScore)
+      }
+      .filter(_._2 > 0) // keep items with score > 0
+      .seq // convert back to sequential collection
+
+    val ord = Ordering.by[(Int, Double), Double](_._2).reverse
+    val topScores = getTopN(indexScores, query.num)(ord).toArray
+
+    topScores
+  }
+
+  private
+  def getTopN[T](s: Iterable[T], n: Int)(implicit ord: Ordering[T]): Seq[T] = {
+
+    val q = PriorityQueue()
+
+    for (x <- s) {
+      if (q.size < n)
+        q.enqueue(x)
+      else {
+        // q is full
+        if (ord.compare(x, q.head) < 0) {
+          q.dequeue()
+          q.enqueue(x)
+        }
+      }
+    }
+
+    q.dequeueAll.toSeq.reverse
+  }
+
+  private
+  def dotProduct(v1: Array[Double], v2: Array[Double]): Double = {
+    val size = v1.size
+    var i = 0
+    var d: Double = 0
+    while (i < size) {
+      d += v1(i) * v2(i)
+      i += 1
+    }
+    d
+  }
+
+  private
+  def cosine(v1: Array[Double], v2: Array[Double]): Double = {
+    val size = v1.size
+    var i = 0
+    var n1: Double = 0
+    var n2: Double = 0
+    var d: Double = 0
+    while (i < size) {
+      n1 += v1(i) * v1(i)
+      n2 += v2(i) * v2(i)
+      d += v1(i) * v2(i)
+      i += 1
+    }
+    val n1n2 = (math.sqrt(n1) * math.sqrt(n2))
+    if (n1n2 == 0) 0 else (d / n1n2)
+  }
+
+  private
+  def isCandidateItem(
+    i: Int,
+    item: Item,
+    categories: Option[Set[String]],
+    whiteList: Option[Set[Int]],
+    blackList: Set[Int]
+  ): Boolean = {
+    // can add other custom filtering here
+    whiteList.map(_.contains(i)).getOrElse(true) &&
+    !blackList.contains(i) &&
+    // filter categories
+    categories.map { cat =>
+      item.categories.map { itemCat =>
+        // keep this item if it has overlapping categories with the query
+        !(itemCat.toSet.intersect(cat).isEmpty)
+      }.getOrElse(false) // discard this item if it has no categories
+    }.getOrElse(true)
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Engine.scala b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Engine.scala
new file mode 100644
index 0000000..1949a98
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Engine.scala
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.ecommercerecommendation
+
+import org.apache.predictionio.controller.EngineFactory
+import org.apache.predictionio.controller.Engine
+
+case class Query(
+  user: String,
+  num: Int,
+  categories: Option[Set[String]],
+  whiteList: Option[Set[String]],
+  blackList: Option[Set[String]]
+) extends Serializable
+
+case class PredictedResult(
+  itemScores: Array[ItemScore]
+) extends Serializable
+
+case class ItemScore(
+  item: String,
+  score: Double
+) extends Serializable
+
+object ECommerceRecommendationEngine extends EngineFactory {
+  def apply() = {
+    new Engine(
+      classOf[DataSource],
+      classOf[Preparator],
+      Map("ecomm" -> classOf[ECommAlgorithm]),
+      classOf[Serving])
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala
new file mode 100644
index 0000000..585aaea
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.ecommercerecommendation
+
+import org.apache.predictionio.controller.PPreparator
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+class Preparator
+  extends PPreparator[TrainingData, PreparedData] {
+
+  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
+    new PreparedData(
+      users = trainingData.users,
+      items = trainingData.items,
+      viewEvents = trainingData.viewEvents,
+      buyEvents = trainingData.buyEvents)
+  }
+}
+
+class PreparedData(
+  val users: RDD[(String, User)],
+  val items: RDD[(String, Item)],
+  val viewEvents: RDD[ViewEvent],
+  val buyEvents: RDD[BuyEvent]
+) extends Serializable

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Serving.scala b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Serving.scala
new file mode 100644
index 0000000..711f4f3
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Serving.scala
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.examples.ecommercerecommendation
+
+import org.apache.predictionio.controller.LServing
+
+class Serving
+  extends LServing[Query, PredictedResult] {
+
+  override
+  def serve(query: Query,
+    predictedResults: Seq[PredictedResult]): PredictedResult = {
+    predictedResults.head
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/adjust-score/template.json
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/template.json b/examples/scala-parallel-ecommercerecommendation/adjust-score/template.json
new file mode 100644
index 0000000..d076ec5
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/template.json
@@ -0,0 +1 @@
+{"pio": {"version": { "min": "0.10.0-incubating" }}}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/README.md
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/README.md b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/README.md
deleted file mode 100644
index 67ca44e..0000000
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/README.md
+++ /dev/null
@@ -1,40 +0,0 @@
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-# E-Commerce Recommendation Template with rate event as training data.
-
-This example demonstrates how to modify the E-Commerce Recommendation template to use the "rate" event as training data.
-
-However, the recent "view" event is still used for recommendation for a new user (to recommend items similar to what the new user just recently viewed), and the returned scores are not predicted ratings but ranked scores for the new user.
-
-This template also supports the case where the user may rate the same item multiple times; the latest rating value will be used for training. The modification can be further simplified if support for this case is not needed.
-
-The modification is based on E-Commerce Recommendation template v0.1.1.
-
-You can find the complete modified source code in `src/` directory.
-
-## Documentation
-
-Please refer to http://predictionio.incubator.apache.org/templates/ecommercerecommendation/quickstart/
-and
-http://predictionio.incubator.apache.org/templates/ecommercerecommendation/train-with-rate-event/
-
-### import sample data
-
-```
-$ python data/import_eventserver.py --access_key <your_access_key>
-```

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
index 956f79b..85b21d6 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
@@ -15,15 +15,10 @@
  * limitations under the License.
  */
 
-import AssemblyKeys._
-
-assemblySettings
-
 name := "template-scala-parallel-ecommercerecommendation"
 
 organization := "org.apache.predictionio"
-
+scalaVersion := "2.11.8"
 libraryDependencies ++= Seq(
-  "org.apache.predictionio"    %% "core"          % pioVersion.value  % "provided",
-  "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
-  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
+  "org.apache.predictionio" %% "apache-predictionio-core" % "0.11.0-incubating" % "provided",
+  "org.apache.spark"        %% "spark-mllib"              % "2.1.1" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/import_eventserver.py
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/import_eventserver.py b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/import_eventserver.py
index e813776..6233809 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/import_eventserver.py
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/import_eventserver.py
@@ -28,13 +28,13 @@ SEED = 3
 def import_events(client):
   random.seed(SEED)
   count = 0
-  print client.get_status()
-  print "Importing data..."
+  print(client.get_status())
+  print("Importing data...")
 
   # generate 10 users, with user ids u1,u2,....,u10
   user_ids = ["u%s" % i for i in range(1, 11)]
   for user_id in user_ids:
-    print "Set user", user_id
+    print("Set user", user_id)
     client.create_event(
       event="$set",
       entity_type="user",
@@ -47,7 +47,7 @@ def import_events(client):
   categories = ["c%s" % i for i in range(1, 7)]
   item_ids = ["i%s" % i for i in range(1, 51)]
   for item_id in item_ids:
-    print "Set item", item_id
+    print("Set item", item_id)
     client.create_event(
       event="$set",
       entity_type="item",
@@ -61,7 +61,7 @@ def import_events(client):
   # each user randomly viewed 10 items
   for user_id in user_ids:
     for viewed_item in random.sample(item_ids, 10):
-      print "User", user_id ,"views item", viewed_item
+      print("User", user_id ,"views item", viewed_item)
       client.create_event(
         event="view",
         entity_type="user",
@@ -73,7 +73,7 @@ def import_events(client):
       # randomly rate some of the viewed items
       if random.choice([True, False]):
         rating = random.choice(range(1,6))
-        print "User", user_id ,"rates item", viewed_item, "rating", rating
+        print("User", user_id ,"rates item", viewed_item, "rating", rating)
         client.create_event(
           event="rate",
           entity_type="user",
@@ -87,7 +87,7 @@ def import_events(client):
         count += 1
       # randomly buy some of the viewed items
       if random.choice([True, False]):
-        print "User", user_id ,"buys item", viewed_item
+        print("User", user_id ,"buys item", viewed_item)
         client.create_event(
           event="buy",
           entity_type="user",
@@ -97,7 +97,7 @@ def import_events(client):
         )
         count += 1
 
-  print "%s events are imported." % count
+  print("%s events are imported." % count)
 
 if __name__ == '__main__':
   parser = argparse.ArgumentParser(
@@ -106,7 +106,7 @@ if __name__ == '__main__':
   parser.add_argument('--url', default="http://localhost:7070")
 
   args = parser.parse_args()
-  print args
+  print(args)
 
   client = predictionio.EventClient(
     access_key=args.access_key,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/send_query.py
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/send_query.py b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/send_query.py
index 21e9146..c3a5dd2 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/send_query.py
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/data/send_query.py
@@ -21,4 +21,4 @@ Send sample query to prediction engine
 
 import predictionio
 engine_client = predictionio.EngineClient(url="http://localhost:8000")
-print engine_client.send_query({"user": "u1", "num": 4})
+print(engine_client.send_query({"user": "u1", "num": 4}))

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/engine.json
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/engine.json b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/engine.json
index d404a1b..71b1630 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/engine.json
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/engine.json
@@ -1,19 +1,20 @@
 {
   "id": "default",
   "description": "Default settings",
-  "engineFactory": "org.template.ecommercerecommendation.ECommerceRecommendationEngine",
+  "engineFactory": "org.apache.predictionio.examples.ecommercerecommendation.ECommerceRecommendationEngine",
   "datasource": {
     "params" : {
-      "appId": 12
+      "appName": "MyApp1"
     }
   },
   "algorithms": [
     {
-      "name": "als",
+      "name": "ecomm",
       "params": {
-        "appId": 12,
+        "appName": "MyApp1",
         "unseenOnly": true,
         "seenEvents": ["buy", "view"],
+        "similarEvents": ["view"],
         "rank": 10,
         "numIterations" : 20,
         "lambda": 0.01,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/pio.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/pio.sbt b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/pio.sbt
deleted file mode 100644
index 4303be7..0000000
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/pio.sbt
+++ /dev/null
@@ -1,4 +0,0 @@
-// Generated automatically by pio build.
-// Changes in this file will be overridden.
-
-pioVersion := "0.8.7-SNAPSHOT"

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/assembly.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/assembly.sbt b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/assembly.sbt
index 54c3252..e17409e 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/assembly.sbt
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/assembly.sbt
@@ -1 +1 @@
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.4")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/build.properties
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/build.properties b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/build.properties
new file mode 100644
index 0000000..64317fd
--- /dev/null
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.13.15

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/76f34090/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt
deleted file mode 100644
index 9aed0ee..0000000
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt
+++ /dev/null
@@ -1 +0,0 @@
-addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")


Mime
View raw message