predictionio-commits mailing list archives

From don...@apache.org
Subject [50/52] [abbrv] incubator-predictionio git commit: Renamed directory testing to tests
Date Tue, 09 Aug 2016 21:43:58 GMT
Renamed directory testing to tests


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/5320724a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/5320724a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/5320724a

Branch: refs/heads/develop
Commit: 5320724aa0301f3b665ddd9af77351e3c104119c
Parents: bea0121
Author: Marcin Ziemiński <zieminm@gmail.com>
Authored: Fri Aug 5 11:58:46 2016 -0700
Committer: Marcin Ziemiński <zieminm@gmail.com>
Committed: Fri Aug 5 11:58:46 2016 -0700

----------------------------------------------------------------------
 .travis.yml                                     |    6 +-
 testing/Dockerfile                              |   89 --
 testing/README.md                               |   31 -
 testing/after_script.travis.sh                  |    7 -
 testing/before_script.travis.sh                 |   25 -
 testing/docker-files/env-conf/hbase-site.xml    |   12 -
 testing/docker-files/env-conf/pio-env.sh        |   87 -
 .../docker-files/env-conf/spark-defaults.conf   |   13 -
 testing/docker-files/env-conf/spark-env.sh      |   49 -
 testing/docker-files/init.sh                    |   40 -
 testing/pio_tests/README.md                     |   43 -
 testing/pio_tests/__init__.py                   |    0
 .../partially_malformed_events.json             |   10 -
 .../data/eventserver_test/rate_events_25.json   |  278 ----
 .../data/eventserver_test/signup_events_51.json |  257 ---
 .../pio_tests/data/quickstart_test/engine.json  |   21 -
 .../engines/recommendation-engine/README.md     |   42 -
 .../engines/recommendation-engine/build.sbt     |   12 -
 .../data/import_eventserver.py                  |   56 -
 .../data/sample_movielens_data.txt              | 1501 ------------------
 .../recommendation-engine/data/send_query.py    |    7 -
 .../engines/recommendation-engine/engine.json   |   21 -
 .../engines/recommendation-engine/manifest.json |    1 -
 .../recommendation-engine/project/assembly.sbt  |    1 -
 .../recommendation-engine/project/pio-build.sbt |    1 -
 .../src/main/scala/ALSAlgorithm.scala           |  138 --
 .../src/main/scala/ALSModel.scala               |   63 -
 .../src/main/scala/DataSource.scala             |  103 --
 .../src/main/scala/Engine.scala                 |   32 -
 .../src/main/scala/Evaluation.scala             |   89 --
 .../src/main/scala/Preparator.scala             |   19 -
 .../src/main/scala/Serving.scala                |   13 -
 .../engines/recommendation-engine/template.json |    1 -
 testing/pio_tests/globals.py                    |   17 -
 testing/pio_tests/integration.py                |   46 -
 testing/pio_tests/scenarios/__init__.py         |    0
 .../pio_tests/scenarios/basic_app_usecases.py   |  154 --
 testing/pio_tests/scenarios/eventserver_test.py |  155 --
 testing/pio_tests/scenarios/quickstart_test.py  |  125 --
 testing/pio_tests/tests.py                      |   80 -
 testing/pio_tests/utils.py                      |  309 ----
 testing/run_docker.sh                           |   39 -
 testing/script.travis.sh                        |   21 -
 tests/Dockerfile                                |   89 ++
 tests/README.md                                 |   31 +
 tests/after_script.travis.sh                    |    7 +
 tests/before_script.travis.sh                   |   25 +
 tests/docker-files/env-conf/hbase-site.xml      |   12 +
 tests/docker-files/env-conf/pio-env.sh          |   87 +
 tests/docker-files/env-conf/spark-defaults.conf |   13 +
 tests/docker-files/env-conf/spark-env.sh        |   49 +
 tests/docker-files/init.sh                      |   40 +
 tests/pio_tests/README.md                       |   43 +
 tests/pio_tests/__init__.py                     |    0
 .../partially_malformed_events.json             |   10 +
 .../data/eventserver_test/rate_events_25.json   |  278 ++++
 .../data/eventserver_test/signup_events_51.json |  257 +++
 .../pio_tests/data/quickstart_test/engine.json  |   21 +
 .../engines/recommendation-engine/README.md     |   42 +
 .../engines/recommendation-engine/build.sbt     |   12 +
 .../data/import_eventserver.py                  |   56 +
 .../data/sample_movielens_data.txt              | 1501 ++++++++++++++++++
 .../recommendation-engine/data/send_query.py    |    7 +
 .../engines/recommendation-engine/engine.json   |   21 +
 .../engines/recommendation-engine/manifest.json |    1 +
 .../recommendation-engine/project/assembly.sbt  |    1 +
 .../recommendation-engine/project/pio-build.sbt |    1 +
 .../src/main/scala/ALSAlgorithm.scala           |  138 ++
 .../src/main/scala/ALSModel.scala               |   63 +
 .../src/main/scala/DataSource.scala             |  103 ++
 .../src/main/scala/Engine.scala                 |   32 +
 .../src/main/scala/Evaluation.scala             |   89 ++
 .../src/main/scala/Preparator.scala             |   19 +
 .../src/main/scala/Serving.scala                |   13 +
 .../engines/recommendation-engine/template.json |    1 +
 tests/pio_tests/globals.py                      |   17 +
 tests/pio_tests/integration.py                  |   46 +
 tests/pio_tests/scenarios/__init__.py           |    0
 tests/pio_tests/scenarios/basic_app_usecases.py |  154 ++
 tests/pio_tests/scenarios/eventserver_test.py   |  155 ++
 tests/pio_tests/scenarios/quickstart_test.py    |  125 ++
 tests/pio_tests/tests.py                        |   80 +
 tests/pio_tests/utils.py                        |  309 ++++
 tests/run_docker.sh                             |   39 +
 tests/script.travis.sh                          |   21 +
 85 files changed, 4011 insertions(+), 4011 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index c5974ea..411733f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -56,10 +56,10 @@ before_install:
   - service haveged start
 
 before_script:
-  - ./testing/before_script.travis.sh
+  - ./tests/before_script.travis.sh
 
 script:
-  - ./testing/script.travis.sh
+  - ./tests/script.travis.sh
 
 after_script:
-  - ./testing/after_script.travis.sh
+  - ./tests/after_script.travis.sh

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/Dockerfile
----------------------------------------------------------------------
diff --git a/testing/Dockerfile b/testing/Dockerfile
deleted file mode 100644
index fc3d89d..0000000
--- a/testing/Dockerfile
+++ /dev/null
@@ -1,89 +0,0 @@
-FROM ubuntu
-
-ENV SPARK_VERSION 1.4.0
-ENV ELASTICSEARCH_VERSION 1.4.4
-ENV HBASE_VERSION 1.0.0
-
-RUN echo "== Updating system =="
-RUN apt-get update -y
-RUN echo "== Downloading packages =="
-RUN apt-get install -y \
-    wget curl \
-    python-pip \
-    python3-pip \
-    postgresql postgresql-contrib \
-    openjdk-8-jdk \
-    openssh-client openssh-server
-
-RUN pip install predictionio
-RUN pip3 install --upgrade pip
-RUN pip3 install xmlrunner
-RUN pip3 install --upgrade requests
-RUN pip3 install --upgrade urllib3
-
-ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64/jre
-
-RUN echo "== Installing Spark =="
-RUN mkdir vendors
-RUN wget http://d3kbcqa49mib13.cloudfront.net/spark-${SPARK_VERSION}-bin-hadoop2.6.tgz
-RUN tar zxvfC spark-${SPARK_VERSION}-bin-hadoop2.6.tgz /vendors
-RUN rm spark-${SPARK_VERSION}-bin-hadoop2.6.tgz
-ENV SPARK_HOME /vendors/spark-${SPARK_VERSION}-bin-hadoop2.6
-
-RUN echo "== Installing Elasticsearch =="
-RUN wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
-RUN tar zxvfC elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz /vendors
-RUN rm elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
-ENV ELASTICSEARCH_HOME /vendors/elasticsearch-${ELASTICSEARCH_VERSION}
-
-RUN echo "== Installing HBase =="
-RUN wget http://archive.apache.org/dist/hbase/hbase-${HBASE_VERSION}/hbase-${HBASE_VERSION}-bin.tar.gz
-RUN tar zxvfC hbase-${HBASE_VERSION}-bin.tar.gz /vendors
-RUN rm hbase-${HBASE_VERSION}-bin.tar.gz
-ENV HBASE_HOME /vendors/hbase-${HBASE_VERSION}
-
-RUN echo "== Downloading database drivers =="
-RUN mkdir drivers
-RUN wget https://jdbc.postgresql.org/download/postgresql-9.4-1204.jdbc41.jar -P /drivers
-
-RUN mkdir PredictionIO
-ENV PIO_HOME /PredictionIO
-ENV PATH ${PIO_HOME}/bin/:${PATH}
-ENV HOST_PIO_HOME /pio_host
-
-RUN echo "== Setting configs =="
-COPY docker-files/init.sh init.sh
-COPY docker-files/env-conf/spark-env.sh ${SPARK_HOME}/conf/spark-env.sh
-COPY docker-files/env-conf/hbase-site.xml ${HBASE_HOME}/conf/hbase-site.xml
-COPY docker-files/env-conf/pio-env.sh /pio-env.sh
-
-# Default repositories setup
-ENV PIO_STORAGE_REPOSITORIES_METADATA_SOURCE PGSQL
-ENV PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE PGSQL
-ENV PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE PGSQL
-
-# JVM settings
-ENV JVM_OPTS '-Dfile.encoding=UTF8 -Xms2048M -Xmx2048M -Xss8M -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256M'
-
-# Expose relevant ports
-# pio engine
-EXPOSE 8000
-# eventserver
-EXPOSE 7070
-# spark master UI
-EXPOSE 8080
-# spark worker UI
-EXPOSE 8081
-# spark context UI
-EXPOSE 4040
-# HMaster
-EXPOSE 60000
-# HMaster Info Web UI
-EXPOSE 60010
-# Region Server
-EXPOSE 60020
-# Region Server Http
-EXPOSE 60030
-
-ENTRYPOINT ["/init.sh"]
-CMD 'bash'

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/README.md
----------------------------------------------------------------------
diff --git a/testing/README.md b/testing/README.md
deleted file mode 100644
index a9e5dea..0000000
--- a/testing/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Testing PredictionIO
-
-The intention of this subdirectory is to amass the different types of tests other than unit tests, and to make developers' lives easier by giving them the means to check the application deterministically for different configurations.
-Moreover, it provides testing scenarios for **TravisCI** to run on pull requests and commits.
-
-
-## Integration Tests
-These tests are mostly user-functionality tests. They check logic and reliability of the system.
-In order to get familiar with their structure, please see [README](pio_tests/README.md).
-
-## Docker image
-After introducing some changes, a developer may want to try them against different configurations, that is, to see whether everything works as expected, e.g. when the data repository for events or meta-data is changed.
-A good way to do that is to use the Docker image with its dependencies installed and running.
-
-To download the image run:
-```
-$ docker pull ziemin/pio-testing
-```
-
-The most convenient way to make use of it is to execute the ***run_docker.sh*** script, passing it the configuration, the path to PredictionIO's repository containing an archived snapshot, and the command to run. When no command is provided, it opens a bash shell inside the Docker image. Example of usage:
-```sh
-$ ./run_docker.sh ELASTICSEARCH HBASE LOCALFS \ 
-    ~/projects/incubator-predictionio "echo 'All tests passed...'"
-```
-
-Directory structure inside the image:
-* ***/PredictionIO*** - extracted snapshot
-* ***/pio_host*** - mounted path to repository
-* ***/tests/pio_tests*** - copy of integration tests
-* ***/vendors*** - directory with installed services
-* ***/drivers*** - jars with database drivers

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/after_script.travis.sh
----------------------------------------------------------------------
diff --git a/testing/after_script.travis.sh b/testing/after_script.travis.sh
deleted file mode 100755
index fdc635f..0000000
--- a/testing/after_script.travis.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash -
-
-set -e
-
-if [[ $BUILD_TYPE == Unit ]]; then
-  ./bin/travis/pio-stop-travis
-fi

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/before_script.travis.sh
----------------------------------------------------------------------
diff --git a/testing/before_script.travis.sh b/testing/before_script.travis.sh
deleted file mode 100755
index d7f9cef..0000000
--- a/testing/before_script.travis.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash -
-
-set -e
-
-if [[ $BUILD_TYPE == Unit ]]; then
-
-  # Download spark, hbase
-  mkdir vendors
-  wget http://d3kbcqa49mib13.cloudfront.net/spark-1.3.0-bin-hadoop2.4.tgz
-  tar zxfC spark-1.3.0-bin-hadoop2.4.tgz vendors
-  wget http://archive.apache.org/dist/hbase/hbase-1.0.0/hbase-1.0.0-bin.tar.gz
-  tar zxfC hbase-1.0.0-bin.tar.gz vendors
-
-  # Prepare pio environment variables
-  set -a
-  source conf/pio-env.sh.travis
-  set +a
-
-  # Create postgres database for PredictionIO
-  psql -c 'create database predictionio;' -U postgres
-  ./bin/travis/pio-start-travis
-
-else # Integration Tests
-  ./make-distribution.sh
-fi

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/docker-files/env-conf/hbase-site.xml
----------------------------------------------------------------------
diff --git a/testing/docker-files/env-conf/hbase-site.xml b/testing/docker-files/env-conf/hbase-site.xml
deleted file mode 100644
index af3ab4f..0000000
--- a/testing/docker-files/env-conf/hbase-site.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<configuration>
-  <property>
-    <name>hbase.rootdir</name>
-    <value>file:///hbase-files/data</value>
-  </property>
-  <property>
-    <name>hbase.zookeeper.property.dataDir</name>
-    <value>/hbase-files/zookeeper</value>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/docker-files/env-conf/pio-env.sh
----------------------------------------------------------------------
diff --git a/testing/docker-files/env-conf/pio-env.sh b/testing/docker-files/env-conf/pio-env.sh
deleted file mode 100644
index 8391e97..0000000
--- a/testing/docker-files/env-conf/pio-env.sh
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env bash
-
-# Copy this file as pio-env.sh and edit it for your site's configuration.
-
-# PredictionIO Main Configuration
-#
-# This section controls core behavior of PredictionIO. It is very likely that
-# you need to change these to fit your site.
-
-# SPARK_HOME: Apache Spark is a hard dependency and must be configured.
-SPARK_HOME=$SPARK_HOME
-
-POSTGRES_JDBC_DRIVER=/drivers/postgresql-9.4-1204.jdbc41.jar
-MYSQL_JDBC_DRIVER=
-
-# ES_CONF_DIR: You must configure this if you have advanced configuration for
-#              your Elasticsearch setup.
-# ES_CONF_DIR=/opt/elasticsearch
-
-# HADOOP_CONF_DIR: You must configure this if you intend to run PredictionIO
-#                  with Hadoop 2.
-# HADOOP_CONF_DIR=/opt/hadoop
-
-# HBASE_CONF_DIR: You must configure this if you intend to run PredictionIO
-#                 with HBase on a remote cluster.
-HBASE_CONF_DIR=$HBASE_HOME/conf
-
-# Filesystem paths where PredictionIO uses as block storage.
-PIO_FS_BASEDIR=$HOME/.pio_store
-PIO_FS_ENGINESDIR=$PIO_FS_BASEDIR/engines
-PIO_FS_TMPDIR=$PIO_FS_BASEDIR/tmp
-
-# PredictionIO Storage Configuration
-#
-# This section controls programs that make use of PredictionIO's built-in
-# storage facilities. Default values are shown below.
-#
-# For more information on storage configuration please refer to
-# https://docs.prediction.io/system/anotherdatastore/
-
-# Storage Repositories
-
-# Default is to use PostgreSQL
-PIO_STORAGE_REPOSITORIES_METADATA_NAME=pio_meta
-PIO_STORAGE_REPOSITORIES_METADATA_SOURCE=$PIO_STORAGE_REPOSITORIES_METADATA_SOURCE
-
-PIO_STORAGE_REPOSITORIES_EVENTDATA_NAME=pio_event
-PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE=$PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE
-
-PIO_STORAGE_REPOSITORIES_MODELDATA_NAME=pio_model
-PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE=$PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE
-
-# Storage Data Sources
-
-# PostgreSQL Default Settings
-# Please change "pio" to your database name in PIO_STORAGE_SOURCES_PGSQL_URL
-# Please change PIO_STORAGE_SOURCES_PGSQL_USERNAME and
-# PIO_STORAGE_SOURCES_PGSQL_PASSWORD accordingly
-PIO_STORAGE_SOURCES_PGSQL_TYPE=jdbc
-PIO_STORAGE_SOURCES_PGSQL_URL=jdbc:postgresql://localhost/pio
-PIO_STORAGE_SOURCES_PGSQL_USERNAME=pio
-PIO_STORAGE_SOURCES_PGSQL_PASSWORD=pio
-
-# MySQL Example
-# PIO_STORAGE_SOURCES_MYSQL_TYPE=jdbc
-# PIO_STORAGE_SOURCES_MYSQL_URL=jdbc:mysql://localhost/pio
-# PIO_STORAGE_SOURCES_MYSQL_USERNAME=pio
-# PIO_STORAGE_SOURCES_MYSQL_PASSWORD=pio
-
-# Elasticsearch Example
-PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch
-#PIO_STORAGE_SOURCES_ELASTICSEARCH_CLUSTERNAME=pio
-PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=localhost
-PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300
-PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$ELASTICSEARCH_HOME
-
-# Local File System Example
-PIO_STORAGE_SOURCES_LOCALFS_TYPE=localfs
-PIO_STORAGE_SOURCES_LOCALFS_PATH=$PIO_FS_BASEDIR/local_models
-
-# HBase Example
-PIO_STORAGE_SOURCES_HBASE_TYPE=hbase
-PIO_STORAGE_SOURCES_HBASE_HOME=$HBASE_HOME
-
-# HDFS config
-PIO_STORAGE_SOURCES_HDFS_TYPE=hdfs
-PIO_STORAGE_SOURCES_HDFS_PATH=/hdfs_models

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/docker-files/env-conf/spark-defaults.conf
----------------------------------------------------------------------
diff --git a/testing/docker-files/env-conf/spark-defaults.conf b/testing/docker-files/env-conf/spark-defaults.conf
deleted file mode 100644
index fcb1b15..0000000
--- a/testing/docker-files/env-conf/spark-defaults.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-# Default system properties included when running spark-submit.
-# This is useful for setting default environmental settings.
-
-# Example:
-# spark.master                     spark://master:7077
-# spark.eventLog.enabled           true
-# spark.eventLog.dir               hdfs://namenode:8021/directory
-# spark.serializer                 org.apache.spark.serializer.KryoSerializer
-spark.driver.memory              10g
-spark.executor.memory            10g
-spark.driver.cores               4
-spark.ui.port                    4040
-# spark.executor.extraJavaOptions  -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/docker-files/env-conf/spark-env.sh
----------------------------------------------------------------------
diff --git a/testing/docker-files/env-conf/spark-env.sh b/testing/docker-files/env-conf/spark-env.sh
deleted file mode 100755
index 22e7a9c..0000000
--- a/testing/docker-files/env-conf/spark-env.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env bash
-
-# Options read when launching programs locally with
-# ./bin/run-example or ./bin/spark-submit
-# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
-# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
-# - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
-# - SPARK_CLASSPATH, default classpath entries to append
-
-# Options read by executors and drivers running inside the cluster
-# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
-# - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
-# - SPARK_CLASSPATH, default classpath entries to append
-# - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
-# - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos
-
-# Options read in YARN client mode
-# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
-# - SPARK_EXECUTOR_INSTANCES, Number of workers to start (Default: 2)
-# - SPARK_EXECUTOR_CORES, Number of cores for the workers (Default: 1).
-# - SPARK_EXECUTOR_MEMORY, Memory per Worker (e.g. 1000M, 2G) (Default: 1G)
-# - SPARK_DRIVER_MEMORY, Memory for Master (e.g. 1000M, 2G) (Default: 1G)
-# - SPARK_YARN_APP_NAME, The name of your application (Default: Spark)
-# - SPARK_YARN_QUEUE, The hadoop queue to use for allocation requests (Default: ‘default’)
-# - SPARK_YARN_DIST_FILES, Comma separated list of files to be distributed with the job.
-# - SPARK_YARN_DIST_ARCHIVES, Comma separated list of archives to be distributed with the job.
-
-# Options for the daemons used in the standalone deploy mode
-# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
-# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master
-# - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y")
-# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
-# - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g)
-# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker
-# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
-# - SPARK_WORKER_DIR, to set the working directory of worker processes
-# - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y")
-# - SPARK_DAEMON_MEMORY, to allocate to the master, worker and history server themselves (default: 1g).
-# - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y")
-# - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y")
-# - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y")
-# - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers
-
-# Generic options for the daemons used in the standalone deploy mode
-# - SPARK_CONF_DIR      Alternate conf dir. (Default: ${SPARK_HOME}/conf)
-# - SPARK_LOG_DIR       Where log files are stored.  (Default: ${SPARK_HOME}/logs)
-# - SPARK_PID_DIR       Where the pid file is stored. (Default: /tmp)
-# - SPARK_IDENT_STRING  A string representing this instance of spark. (Default: $USER)
-# - SPARK_NICENESS      The scheduling priority for daemons. (Default: 0)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/docker-files/init.sh
----------------------------------------------------------------------
diff --git a/testing/docker-files/init.sh b/testing/docker-files/init.sh
deleted file mode 100755
index a78d2c2..0000000
--- a/testing/docker-files/init.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash -
-
-set -e
-
-echo '== Setting up Postgres... =='
-service postgresql start
-runuser postgres -c 'createuser -s root'
-createdb root
-
-psql -c "create user pio with password 'pio'" && createdb pio
-
-echo '== Starting SSH... =='
-service ssh start
-ssh-keygen -b 2048 -t rsa -q -f /root/.ssh/id_rsa -N ""
-cat /root/.ssh/id_rsa.pub >> /root/.ssh/authorized_keys
-
-echo '== Starting HBase... =='
-$HBASE_HOME/bin/start-hbase.sh
-
-echo '== Starting standalone Spark cluster... =='
-$SPARK_HOME/sbin/start-all.sh
-
-echo '== Starting Elasticsearch... =='
-$ELASTICSEARCH_HOME/bin/elasticsearch -d -p $PIO_HOME/es.pid
-
-echo '== Copying distribution to PIO_HOME... =='
-DISTRIBUTION_TAR=`find /pio_host -maxdepth 1 -name PredictionIO*SNAPSHOT.tar.gz | head -1`
-tar zxvfC $DISTRIBUTION_TAR /
-DIR_NAME=/`basename $DISTRIBUTION_TAR`
-DIR_NAME=${DIR_NAME%.tar.gz}
-mv $DIR_NAME/* $PIO_HOME/
-mv /pio-env.sh $PIO_HOME/conf/pio-env.sh
-
-echo '== Copying tests to a separate directory =='
-mkdir /tests
-cp -r /pio_host/testing/pio_tests /tests/pio_tests
-export PYTHONPATH=/tests:$PYTHONPATH
-
-# after initialization run given command
-eval $@

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/README.md
----------------------------------------------------------------------
diff --git a/testing/pio_tests/README.md b/testing/pio_tests/README.md
deleted file mode 100644
index c885ab5..0000000
--- a/testing/pio_tests/README.md
+++ /dev/null
@@ -1,43 +0,0 @@
-# PredictionIO - Integration Tests
-
-This Python module introduces a basic framework for adding integration tests to
-PredictionIO. It is nothing more than a collection of utility functions, mostly wrappers
-over shell-executed commands.
-
-### Prerequisites
-In order to execute the tests, besides a configured **PredictionIO** environment, one
-has to install the following Python 3 packages:
-* requests
-* unittest
-* xmlrunner
-
-### Execution
-*tests.py* - the executable script. It launches an eventserver to be available for the tests.
-You can pass it arguments to:
-* suppress the output of the shell commands executed within the tests
-* enable logging
-* specify which tests should be executed (by name)
-
-For more information run:
-```shell
-python3 tests.py -h
-```
-
-As soon as the tests are finished, an XML file with JUnit-like test reports
-is created in the directory of execution.
-
-### Adding new tests
-Every test should be an instance of **BaseTestCase** defined in **pio_tests.integration**.
-Upon creation, a **pio_tests.integration.TestContext** object is provided to it with a description of:
-* the IP address and port of the running eventserver
-* the directories containing stored engines and data for specific tests
-
-Every test should be registered in the appropriate place in the *tests.py* file, whereas
-its definition should reside in the **pio_tests.scenarios** module. If a test requires additional files
-during execution, you should put them under the *data* directory mentioned above.
-
-The best way to test different application engines is to make use of **pio_tests.utils.AppEngine**.
-Apart from providing utility functions, it downloads engine templates when necessary.
-
-To see an example of an implemented test, check **pio_tests.scenarios.quickstart_test**, which
-reproduces the QuickStart tutorial from the documentation site.
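
Based on the conventions described in this README, a new scenario might look roughly like the sketch below. The constructor signature, the `runTest` hook, and the `MyScenarioTest` name are assumptions drawn from the unittest-style framework the README mentions, not verbatim `pio_tests` code.

```python
# A minimal sketch only: the constructor signature, the runTest hook and the
# scenario name are assumptions based on the unittest-style framework that
# the README above describes, not verbatim pio_tests code.
from pio_tests.integration import BaseTestCase


class MyScenarioTest(BaseTestCase):
    def __init__(self, test_context):
        # TestContext is assumed to carry the eventserver address and the
        # engine/data directories mentioned in the README.
        super().__init__(test_context)

    def runTest(self):
        # Exercise the running eventserver or a deployed engine here,
        # e.g. with the `requests` package listed in the prerequisites.
        pass
```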

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/__init__.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/__init__.py b/testing/pio_tests/__init__.py
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/data/eventserver_test/partially_malformed_events.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/data/eventserver_test/partially_malformed_events.json b/testing/pio_tests/data/eventserver_test/partially_malformed_events.json
deleted file mode 100644
index f95bae4..0000000
--- a/testing/pio_tests/data/eventserver_test/partially_malformed_events.json
+++ /dev/null
@@ -1,10 +0,0 @@
-[
-  { 
-    "event" : "test",
-    "entityType" : "test",
-    "entityId" : "t2"
-  },
-  {
-    "event" : "malformed-event" 
-  }
-]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/data/eventserver_test/rate_events_25.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/data/eventserver_test/rate_events_25.json b/testing/pio_tests/data/eventserver_test/rate_events_25.json
deleted file mode 100644
index 3b97285..0000000
--- a/testing/pio_tests/data/eventserver_test/rate_events_25.json
+++ /dev/null
@@ -1,278 +0,0 @@
-[
-  {
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "1",
-    "targetEntityType" : "item",
-    "targetEntityId" : "1",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "1",
-    "targetEntityType" : "item",
-    "targetEntityId" : "2",
-    "properties" : {
-      "rating" : 3
-    },
-    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "1",
-    "targetEntityType" : "item",
-    "targetEntityId" : "3",
-    "properties" : {
-      "rating" : 1
-    },
-    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "1",
-    "targetEntityType" : "item",
-    "targetEntityId" : "4",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "1",
-    "targetEntityType" : "item",
-    "targetEntityId" : "5",
-    "properties" : {
-      "rating" : 3
-    },
-    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "2",
-    "targetEntityType" : "item",
-    "targetEntityId" : "1",
-    "properties" : {
-      "rating" : 1
-    },
-    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "2",
-    "targetEntityType" : "item",
-    "targetEntityId" : "2",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "2",
-    "targetEntityType" : "item",
-    "targetEntityId" : "3",
-    "properties" : {
-      "rating" : 3
-    },
-    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "2",
-    "targetEntityType" : "item",
-    "targetEntityId" : "4",
-    "properties" : {
-      "rating" : 3
-    },
-    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "2",
-    "targetEntityType" : "item",
-    "targetEntityId" : "5",
-    "properties" : {
-      "rating" : 4
-    },
-    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
-  },
-  {
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "3",
-    "targetEntityType" : "item",
-    "targetEntityId" : "1",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "3",
-    "targetEntityType" : "item",
-    "targetEntityId" : "2",
-    "properties" : {
-      "rating" : 2
-    },
-    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "3",
-    "targetEntityType" : "item",
-    "targetEntityId" : "3",
-    "properties" : {
-      "rating" : 1
-    },
-    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "3",
-    "targetEntityType" : "item",
-    "targetEntityId" : "4",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "3",
-    "targetEntityType" : "item",
-    "targetEntityId" : "5",
-    "properties" : {
-      "rating" : 3
-    },
-    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "4",
-    "targetEntityType" : "item",
-    "targetEntityId" : "1",
-    "properties" : {
-      "rating" : 3
-    },
-    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "4",
-    "targetEntityType" : "item",
-    "targetEntityId" : "2",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "4",
-    "targetEntityType" : "item",
-    "targetEntityId" : "3",
-    "properties" : {
-      "rating" : 4
-    },
-    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "4",
-    "targetEntityType" : "item",
-    "targetEntityId" : "4",
-    "properties" : {
-      "rating" : 2
-    },
-    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "4",
-    "targetEntityType" : "item",
-    "targetEntityId" : "5",
-    "properties" : {
-      "rating" : 4
-    },
-    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "5",
-    "targetEntityType" : "item",
-    "targetEntityId" : "1",
-    "properties" : {
-      "rating" : 2
-    },
-    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "5",
-    "targetEntityType" : "item",
-    "targetEntityId" : "2",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "5",
-    "targetEntityType" : "item",
-    "targetEntityId" : "3",
-    "properties" : {
-      "rating" : 3
-    },
-    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "5",
-    "targetEntityType" : "item",
-    "targetEntityId" : "4",
-    "properties" : {
-      "rating" : 5
-    },
-    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
-  },
-  { 
-    "event" : "rate",
-    "entityType" : "user",
-    "entityId" : "5",
-    "targetEntityType" : "item",
-    "targetEntityId" : "5",
-    "properties" : {
-      "rating" : 4
-    },
-    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
-  }
-]
-
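
The events above are in the eventserver's JSON batch format. Below is a sketch of how such a file might be replayed against a running eventserver, assuming the default port 7070, the standard `/batch/events.json` endpoint, and a placeholder access key; the actual eventserver_test scenario may do this differently.

```python
# Sketch only: the access key is a placeholder and the endpoint/port are the
# eventserver defaults; the real eventserver_test scenario may differ.
import json
import requests

with open("rate_events_25.json") as f:
    events = json.load(f)

response = requests.post(
    "http://localhost:7070/batch/events.json",
    params={"accessKey": "SOME_ACCESS_KEY"},
    json=events)
print(response.status_code, response.json())
```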

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/data/eventserver_test/signup_events_51.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/data/eventserver_test/signup_events_51.json b/testing/pio_tests/data/eventserver_test/signup_events_51.json
deleted file mode 100644
index d8c31bd..0000000
--- a/testing/pio_tests/data/eventserver_test/signup_events_51.json
+++ /dev/null
@@ -1,257 +0,0 @@
-[
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "1"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "2"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "3"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "4"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "5"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "6"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "7"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "8"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "9"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "10"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "11"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "12"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "13"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "14"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "15"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "16"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "17"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "18"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "19"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "20"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "21"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "22"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "23"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "24"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "25"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "26"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "27"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "28"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "29"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "30"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "31"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "32"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "33"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "34"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "35"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "36"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "37"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "38"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "39"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "40"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "41"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "42"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "43"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "44"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "45"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "46"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "47"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "48"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "49"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "50"
-  },
-  { 
-    "event" : "sign-up",
-    "entityType" : "user",
-    "entityId" : "51"
-  }
-]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/data/quickstart_test/engine.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/data/quickstart_test/engine.json b/testing/pio_tests/data/quickstart_test/engine.json
deleted file mode 100644
index c7b6b4b..0000000
--- a/testing/pio_tests/data/quickstart_test/engine.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "id": "default",
-  "description": "Default settings",
-  "engineFactory": "org.template.recommendation.RecommendationEngine",
-  "datasource": {
-    "params" : {
-      "appName": "MyRecommender"
-    }
-  },
-  "algorithms": [
-    {
-      "name": "als",
-      "params": {
-        "rank": 10,
-        "numIterations": 10,
-        "lambda": 0.01,
-        "seed": 3
-      }
-    }
-  ]
-}
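
Once an engine built from this configuration is trained and deployed, as in the QuickStart tutorial that the quickstart test reproduces, it can be queried from Python. A hedged sketch, assuming the engine is served on the default port 8000 and accepts the recommendation template's `{"user", "num"}` query format:

```python
# Sketch only: assumes a deployed recommendation engine on the default
# port 8000 and the template's {"user", "num"} query format.
import predictionio

engine_client = predictionio.EngineClient(url="http://localhost:8000")
print(engine_client.send_query({"user": "1", "num": 4}))
```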

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/README.md
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/README.md b/testing/pio_tests/engines/recommendation-engine/README.md
deleted file mode 100644
index 6566db4..0000000
--- a/testing/pio_tests/engines/recommendation-engine/README.md
+++ /dev/null
@@ -1,42 +0,0 @@
-# Recommendation Template
-
-## Documentation
-
-Please refer to http://docs.prediction.io/templates/recommendation/quickstart/
-
-## Versions
-
-### develop
-
-### v0.3.2
-
-- Fix incorrect top items in batchPredict() (issue #5)
-
-### v0.3.1
-
-- Add Evaluation module and modify DataSource for it
-
-### v0.3.0
-
-- update for PredictionIO 0.9.2, including:
-
-  - use new PEventStore API
-  - use appName in DataSource parameter
-
-### v0.2.0
-
-- update build.sbt and template.json for PredictionIO 0.9.2
-
-### v0.1.2
-
-- update for PredictionIO 0.9.0
-
-### v0.1.1
-
-- Persist RDD to memory (.cache()) in DataSource for better performance and quick fix for new user/item ID BiMap error issue.
-
-### v0.1.0
-
-- initial version
-- known issue:
-  * If importing new events of new users/items during training, the new user/item id can't be found in the BiMap.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/build.sbt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/build.sbt b/testing/pio_tests/engines/recommendation-engine/build.sbt
deleted file mode 100644
index c7413bb..0000000
--- a/testing/pio_tests/engines/recommendation-engine/build.sbt
+++ /dev/null
@@ -1,12 +0,0 @@
-import AssemblyKeys._
-
-assemblySettings
-
-name := "template-scala-parallel-recommendation"
-
-organization := "org.apache.predictionio"
-
-libraryDependencies ++= Seq(
-  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
-  "org.apache.spark" %% "spark-core"    % "1.3.0" % "provided",
-  "org.apache.spark" %% "spark-mllib"   % "1.3.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py b/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py
deleted file mode 100644
index 0a1e109..0000000
--- a/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""
-Import sample data for recommendation engine
-"""
-
-import predictionio
-import argparse
-import random
-
-RATE_ACTIONS_DELIMITER = "::"
-SEED = 3
-
-def import_events(client, file):
-  f = open(file, 'r')
-  random.seed(SEED)
-  count = 0
-  print "Importing data..."
-  for line in f:
-    data = line.rstrip('\r\n').split(RATE_ACTIONS_DELIMITER)
-    # For demonstration purpose, randomly mix in some buy events
-    if (random.randint(0, 1) == 1):
-      client.create_event(
-        event="rate",
-        entity_type="user",
-        entity_id=data[0],
-        target_entity_type="item",
-        target_entity_id=data[1],
-        properties= { "rating" : float(data[2]) }
-      )
-    else:
-      client.create_event(
-        event="buy",
-        entity_type="user",
-        entity_id=data[0],
-        target_entity_type="item",
-        target_entity_id=data[1]
-      )
-    count += 1
-  f.close()
-  print "%s events are imported." % count
-
-if __name__ == '__main__':
-  parser = argparse.ArgumentParser(
-    description="Import sample data for recommendation engine")
-  parser.add_argument('--access_key', default='invalid_access_key')
-  parser.add_argument('--url', default="http://localhost:7070")
-  parser.add_argument('--file', default="./data/sample_movielens_data.txt")
-
-  args = parser.parse_args()
-  print args
-
-  client = predictionio.EventClient(
-    access_key=args.access_key,
-    url=args.url,
-    threads=5,
-    qsize=500)
-  import_events(client, args.file)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt b/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
deleted file mode 100644
index f0eee19..0000000
--- a/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
+++ /dev/null
@@ -1,1501 +0,0 @@
-0::2::3
-0::3::1
-0::5::2
-0::9::4
-0::11::1
-0::12::2
-0::15::1
-0::17::1
-0::19::1
-0::21::1
-0::23::1
-0::26::3
-0::27::1
-0::28::1
-0::29::1
-0::30::1
-0::31::1
-0::34::1
-0::37::1
-0::41::2
-0::44::1
-0::45::2
-0::46::1
-0::47::1
-0::48::1
-0::50::1
-0::51::1
-0::54::1
-0::55::1
-0::59::2
-0::61::2
-0::64::1
-0::67::1
-0::68::1
-0::69::1
-0::71::1
-0::72::1
-0::77::2
-0::79::1
-0::83::1
-0::87::1
-0::89::2
-0::91::3
-0::92::4
-0::94::1
-0::95::2
-0::96::1
-0::98::1
-0::99::1
-1::2::2
-1::3::1
-1::4::2
-1::6::1
-1::9::3
-1::12::1
-1::13::1
-1::14::1
-1::16::1
-1::19::1
-1::21::3
-1::27::1
-1::28::3
-1::33::1
-1::36::2
-1::37::1
-1::40::1
-1::41::2
-1::43::1
-1::44::1
-1::47::1
-1::50::1
-1::54::1
-1::56::2
-1::57::1
-1::58::1
-1::60::1
-1::62::4
-1::63::1
-1::67::1
-1::68::4
-1::70::2
-1::72::1
-1::73::1
-1::74::2
-1::76::1
-1::77::3
-1::78::1
-1::81::1
-1::82::1
-1::85::3
-1::86::2
-1::88::2
-1::91::1
-1::92::2
-1::93::1
-1::94::2
-1::96::1
-1::97::1
-2::4::3
-2::6::1
-2::8::5
-2::9::1
-2::10::1
-2::12::3
-2::13::1
-2::15::2
-2::18::2
-2::19::4
-2::22::1
-2::26::1
-2::28::1
-2::34::4
-2::35::1
-2::37::5
-2::38::1
-2::39::5
-2::40::4
-2::47::1
-2::50::1
-2::52::2
-2::54::1
-2::55::1
-2::57::2
-2::58::2
-2::59::1
-2::61::1
-2::62::1
-2::64::1
-2::65::1
-2::66::3
-2::68::1
-2::71::3
-2::76::1
-2::77::1
-2::78::1
-2::80::1
-2::83::5
-2::85::1
-2::87::2
-2::88::1
-2::89::4
-2::90::1
-2::92::4
-2::93::5
-3::0::1
-3::1::1
-3::2::1
-3::7::3
-3::8::3
-3::9::1
-3::14::1
-3::15::1
-3::16::1
-3::18::4
-3::19::1
-3::24::3
-3::26::1
-3::29::3
-3::33::1
-3::34::3
-3::35::1
-3::36::3
-3::37::1
-3::38::2
-3::43::1
-3::44::1
-3::46::1
-3::47::1
-3::51::5
-3::52::3
-3::56::1
-3::58::1
-3::60::3
-3::62::1
-3::65::2
-3::66::1
-3::67::1
-3::68::2
-3::70::1
-3::72::2
-3::76::3
-3::79::3
-3::80::4
-3::81::1
-3::83::1
-3::84::1
-3::86::1
-3::87::2
-3::88::4
-3::89::1
-3::91::1
-3::94::3
-4::1::1
-4::6::1
-4::8::1
-4::9::1
-4::10::1
-4::11::1
-4::12::1
-4::13::1
-4::14::2
-4::15::1
-4::17::1
-4::20::1
-4::22::1
-4::23::1
-4::24::1
-4::29::4
-4::30::1
-4::31::1
-4::34::1
-4::35::1
-4::36::1
-4::39::2
-4::40::3
-4::41::4
-4::43::2
-4::44::1
-4::45::1
-4::46::1
-4::47::1
-4::49::2
-4::50::1
-4::51::1
-4::52::4
-4::54::1
-4::55::1
-4::60::3
-4::61::1
-4::62::4
-4::63::3
-4::65::1
-4::67::2
-4::69::1
-4::70::4
-4::71::1
-4::73::1
-4::78::1
-4::84::1
-4::85::1
-4::87::3
-4::88::3
-4::89::2
-4::96::1
-4::97::1
-4::98::1
-4::99::1
-5::0::1
-5::1::1
-5::4::1
-5::5::1
-5::8::1
-5::9::3
-5::10::2
-5::13::3
-5::15::1
-5::19::1
-5::20::3
-5::21::2
-5::23::3
-5::27::1
-5::28::1
-5::29::1
-5::31::1
-5::36::3
-5::38::2
-5::39::1
-5::42::1
-5::48::3
-5::49::4
-5::50::3
-5::51::1
-5::52::1
-5::54::1
-5::55::5
-5::56::3
-5::58::1
-5::60::1
-5::61::1
-5::64::3
-5::65::2
-5::68::4
-5::70::1
-5::71::1
-5::72::1
-5::74::1
-5::79::1
-5::81::2
-5::84::1
-5::85::1
-5::86::1
-5::88::1
-5::90::4
-5::91::2
-5::95::2
-5::99::1
-6::0::1
-6::1::1
-6::2::3
-6::5::1
-6::6::1
-6::9::1
-6::10::1
-6::15::2
-6::16::2
-6::17::1
-6::18::1
-6::20::1
-6::21::1
-6::22::1
-6::24::1
-6::25::5
-6::26::1
-6::28::1
-6::30::1
-6::33::1
-6::38::1
-6::39::1
-6::43::4
-6::44::1
-6::45::1
-6::48::1
-6::49::1
-6::50::1
-6::53::1
-6::54::1
-6::55::1
-6::56::1
-6::58::4
-6::59::1
-6::60::1
-6::61::3
-6::63::3
-6::66::1
-6::67::3
-6::68::1
-6::69::1
-6::71::2
-6::73::1
-6::75::1
-6::77::1
-6::79::1
-6::81::1
-6::84::1
-6::85::3
-6::86::1
-6::87::1
-6::88::1
-6::89::1
-6::91::2
-6::94::1
-6::95::2
-6::96::1
-7::1::1
-7::2::2
-7::3::1
-7::4::1
-7::7::1
-7::10::1
-7::11::2
-7::14::2
-7::15::1
-7::16::1
-7::18::1
-7::21::1
-7::22::1
-7::23::1
-7::25::5
-7::26::1
-7::29::4
-7::30::1
-7::31::3
-7::32::1
-7::33::1
-7::35::1
-7::37::2
-7::39::3
-7::40::2
-7::42::2
-7::44::1
-7::45::2
-7::47::4
-7::48::1
-7::49::1
-7::53::1
-7::54::1
-7::55::1
-7::56::1
-7::59::1
-7::61::2
-7::62::3
-7::63::2
-7::66::1
-7::67::3
-7::74::1
-7::75::1
-7::76::3
-7::77::1
-7::81::1
-7::82::1
-7::84::2
-7::85::4
-7::86::1
-7::92::2
-7::96::1
-7::97::1
-7::98::1
-8::0::1
-8::2::4
-8::3::2
-8::4::2
-8::5::1
-8::7::1
-8::9::1
-8::11::1
-8::15::1
-8::18::1
-8::19::1
-8::21::1
-8::29::5
-8::31::3
-8::33::1
-8::35::1
-8::36::1
-8::40::2
-8::44::1
-8::45::1
-8::50::1
-8::51::1
-8::52::5
-8::53::5
-8::54::1
-8::55::1
-8::56::1
-8::58::4
-8::60::3
-8::62::4
-8::64::1
-8::67::3
-8::69::1
-8::71::1
-8::72::3
-8::77::3
-8::78::1
-8::79::1
-8::83::1
-8::85::5
-8::86::1
-8::88::1
-8::90::1
-8::92::2
-8::95::4
-8::96::3
-8::97::1
-8::98::1
-8::99::1
-9::2::3
-9::3::1
-9::4::1
-9::5::1
-9::6::1
-9::7::5
-9::9::1
-9::12::1
-9::14::3
-9::15::1
-9::19::1
-9::21::1
-9::22::1
-9::24::1
-9::25::1
-9::26::1
-9::30::3
-9::32::4
-9::35::2
-9::36::2
-9::37::2
-9::38::1
-9::39::1
-9::43::3
-9::49::5
-9::50::3
-9::53::1
-9::54::1
-9::58::1
-9::59::1
-9::60::1
-9::61::1
-9::63::3
-9::64::3
-9::68::1
-9::69::1
-9::70::3
-9::71::1
-9::73::2
-9::75::1
-9::77::2
-9::81::2
-9::82::1
-9::83::1
-9::84::1
-9::86::1
-9::87::4
-9::88::1
-9::90::3
-9::94::2
-9::95::3
-9::97::2
-9::98::1
-10::0::3
-10::2::4
-10::4::3
-10::7::1
-10::8::1
-10::10::1
-10::13::2
-10::14::1
-10::16::2
-10::17::1
-10::18::1
-10::21::1
-10::22::1
-10::24::1
-10::25::3
-10::28::1
-10::35::1
-10::36::1
-10::37::1
-10::38::1
-10::39::1
-10::40::4
-10::41::2
-10::42::3
-10::43::1
-10::49::3
-10::50::1
-10::51::1
-10::52::1
-10::55::2
-10::56::1
-10::58::1
-10::63::1
-10::66::1
-10::67::2
-10::68::1
-10::75::1
-10::77::1
-10::79::1
-10::86::1
-10::89::3
-10::90::1
-10::97::1
-10::98::1
-11::0::1
-11::6::2
-11::9::1
-11::10::1
-11::11::1
-11::12::1
-11::13::4
-11::16::1
-11::18::5
-11::19::4
-11::20::1
-11::21::1
-11::22::1
-11::23::5
-11::25::1
-11::27::5
-11::30::5
-11::32::5
-11::35::3
-11::36::2
-11::37::2
-11::38::4
-11::39::1
-11::40::1
-11::41::1
-11::43::2
-11::45::1
-11::47::1
-11::48::5
-11::50::4
-11::51::3
-11::59::1
-11::61::1
-11::62::1
-11::64::1
-11::66::4
-11::67::1
-11::69::5
-11::70::1
-11::71::3
-11::72::3
-11::75::3
-11::76::1
-11::77::1
-11::78::1
-11::79::5
-11::80::3
-11::81::4
-11::82::1
-11::86::1
-11::88::1
-11::89::1
-11::90::4
-11::94::2
-11::97::3
-11::99::1
-12::2::1
-12::4::1
-12::6::1
-12::7::3
-12::8::1
-12::14::1
-12::15::2
-12::16::4
-12::17::5
-12::18::2
-12::21::1
-12::22::2
-12::23::3
-12::24::1
-12::25::1
-12::27::5
-12::30::2
-12::31::4
-12::35::5
-12::38::1
-12::41::1
-12::44::2
-12::45::1
-12::50::4
-12::51::1
-12::52::1
-12::53::1
-12::54::1
-12::56::2
-12::57::1
-12::60::1
-12::63::1
-12::64::5
-12::66::3
-12::67::1
-12::70::1
-12::72::1
-12::74::1
-12::75::1
-12::77::1
-12::78::1
-12::79::3
-12::82::2
-12::83::1
-12::84::1
-12::85::1
-12::86::1
-12::87::1
-12::88::1
-12::91::3
-12::92::1
-12::94::4
-12::95::2
-12::96::1
-12::98::2
-13::0::1
-13::3::1
-13::4::2
-13::5::1
-13::6::1
-13::12::1
-13::14::2
-13::15::1
-13::17::1
-13::18::3
-13::20::1
-13::21::1
-13::22::1
-13::26::1
-13::27::1
-13::29::3
-13::31::1
-13::33::1
-13::40::2
-13::43::2
-13::44::1
-13::45::1
-13::49::1
-13::51::1
-13::52::2
-13::53::3
-13::54::1
-13::62::1
-13::63::2
-13::64::1
-13::68::1
-13::71::1
-13::72::3
-13::73::1
-13::74::3
-13::77::2
-13::78::1
-13::79::2
-13::83::3
-13::85::1
-13::86::1
-13::87::2
-13::88::2
-13::90::1
-13::93::4
-13::94::1
-13::98::1
-13::99::1
-14::1::1
-14::3::3
-14::4::1
-14::5::1
-14::6::1
-14::7::1
-14::9::1
-14::10::1
-14::11::1
-14::12::1
-14::13::1
-14::14::3
-14::15::1
-14::16::1
-14::17::1
-14::20::1
-14::21::1
-14::24::1
-14::25::2
-14::27::1
-14::28::1
-14::29::5
-14::31::3
-14::34::1
-14::36::1
-14::37::2
-14::39::2
-14::40::1
-14::44::1
-14::45::1
-14::47::3
-14::48::1
-14::49::1
-14::51::1
-14::52::5
-14::53::3
-14::54::1
-14::55::1
-14::56::1
-14::62::4
-14::63::5
-14::67::3
-14::68::1
-14::69::3
-14::71::1
-14::72::4
-14::73::1
-14::76::5
-14::79::1
-14::82::1
-14::83::1
-14::88::1
-14::93::3
-14::94::1
-14::95::2
-14::96::4
-14::98::1
-15::0::1
-15::1::4
-15::2::1
-15::5::2
-15::6::1
-15::7::1
-15::13::1
-15::14::1
-15::15::1
-15::17::2
-15::19::2
-15::22::2
-15::23::2
-15::25::1
-15::26::3
-15::27::1
-15::28::2
-15::29::1
-15::32::1
-15::33::2
-15::34::1
-15::35::2
-15::36::1
-15::37::1
-15::39::1
-15::42::1
-15::46::5
-15::48::2
-15::50::2
-15::51::1
-15::52::1
-15::58::1
-15::62::1
-15::64::3
-15::65::2
-15::72::1
-15::73::1
-15::74::1
-15::79::1
-15::80::1
-15::81::1
-15::82::2
-15::85::1
-15::87::1
-15::91::2
-15::96::1
-15::97::1
-15::98::3
-16::2::1
-16::5::3
-16::6::2
-16::7::1
-16::9::1
-16::12::1
-16::14::1
-16::15::1
-16::19::1
-16::21::2
-16::29::4
-16::30::2
-16::32::1
-16::34::1
-16::36::1
-16::38::1
-16::46::1
-16::47::3
-16::48::1
-16::49::1
-16::50::1
-16::51::5
-16::54::5
-16::55::1
-16::56::2
-16::57::1
-16::60::1
-16::63::2
-16::65::1
-16::67::1
-16::72::1
-16::74::1
-16::80::1
-16::81::1
-16::82::1
-16::85::5
-16::86::1
-16::90::5
-16::91::1
-16::93::1
-16::94::3
-16::95::2
-16::96::3
-16::98::3
-16::99::1
-17::2::1
-17::3::1
-17::6::1
-17::10::4
-17::11::1
-17::13::2
-17::17::5
-17::19::1
-17::20::5
-17::22::4
-17::28::1
-17::29::1
-17::33::1
-17::34::1
-17::35::2
-17::37::1
-17::38::1
-17::45::1
-17::46::5
-17::47::1
-17::49::3
-17::51::1
-17::55::5
-17::56::3
-17::57::1
-17::58::1
-17::59::1
-17::60::1
-17::63::1
-17::66::1
-17::68::4
-17::69::1
-17::70::1
-17::72::1
-17::73::3
-17::78::1
-17::79::1
-17::82::2
-17::84::1
-17::90::5
-17::91::3
-17::92::1
-17::93::1
-17::94::4
-17::95::2
-17::97::1
-18::1::1
-18::4::3
-18::5::2
-18::6::1
-18::7::1
-18::10::1
-18::11::4
-18::12::2
-18::13::1
-18::15::1
-18::18::1
-18::20::1
-18::21::2
-18::22::1
-18::23::2
-18::25::1
-18::26::1
-18::27::1
-18::28::5
-18::29::1
-18::31::1
-18::32::1
-18::36::1
-18::38::5
-18::39::5
-18::40::1
-18::42::1
-18::43::1
-18::44::4
-18::46::1
-18::47::1
-18::48::1
-18::51::2
-18::55::1
-18::56::1
-18::57::1
-18::62::1
-18::63::1
-18::66::3
-18::67::1
-18::70::1
-18::75::1
-18::76::3
-18::77::1
-18::80::3
-18::81::3
-18::82::1
-18::83::5
-18::84::1
-18::97::1
-18::98::1
-18::99::2
-19::0::1
-19::1::1
-19::2::1
-19::4::1
-19::6::2
-19::11::1
-19::12::1
-19::14::1
-19::23::1
-19::26::1
-19::31::1
-19::32::4
-19::33::1
-19::34::1
-19::37::1
-19::38::1
-19::41::1
-19::43::1
-19::45::1
-19::48::1
-19::49::1
-19::50::2
-19::53::2
-19::54::3
-19::55::1
-19::56::2
-19::58::1
-19::61::1
-19::62::1
-19::63::1
-19::64::1
-19::65::1
-19::69::2
-19::72::1
-19::74::3
-19::76::1
-19::78::1
-19::79::1
-19::81::1
-19::82::1
-19::84::1
-19::86::1
-19::87::2
-19::90::4
-19::93::1
-19::94::4
-19::95::2
-19::96::1
-19::98::4
-20::0::1
-20::1::1
-20::2::2
-20::4::2
-20::6::1
-20::8::1
-20::12::1
-20::21::2
-20::22::5
-20::24::2
-20::25::1
-20::26::1
-20::29::2
-20::30::2
-20::32::2
-20::39::1
-20::40::1
-20::41::2
-20::45::2
-20::48::1
-20::50::1
-20::51::3
-20::53::3
-20::55::1
-20::57::2
-20::60::1
-20::61::1
-20::64::1
-20::66::1
-20::70::2
-20::72::1
-20::73::2
-20::75::4
-20::76::1
-20::77::4
-20::78::1
-20::79::1
-20::84::2
-20::85::2
-20::88::3
-20::89::1
-20::90::3
-20::91::1
-20::92::2
-20::93::1
-20::94::4
-20::97::1
-21::0::1
-21::2::4
-21::3::1
-21::7::2
-21::11::1
-21::12::1
-21::13::1
-21::14::3
-21::17::1
-21::19::1
-21::20::1
-21::21::1
-21::22::1
-21::23::1
-21::24::1
-21::27::1
-21::29::5
-21::30::2
-21::38::1
-21::40::2
-21::43::3
-21::44::1
-21::45::1
-21::46::1
-21::48::1
-21::51::1
-21::53::5
-21::54::1
-21::55::1
-21::56::1
-21::58::3
-21::59::3
-21::64::1
-21::66::1
-21::68::1
-21::71::1
-21::73::1
-21::74::4
-21::80::1
-21::81::1
-21::83::1
-21::84::1
-21::85::3
-21::87::4
-21::89::2
-21::92::2
-21::96::3
-21::99::1
-22::0::1
-22::3::2
-22::5::2
-22::6::2
-22::9::1
-22::10::1
-22::11::1
-22::13::1
-22::14::1
-22::16::1
-22::18::3
-22::19::1
-22::22::5
-22::25::1
-22::26::1
-22::29::3
-22::30::5
-22::32::4
-22::33::1
-22::35::1
-22::36::3
-22::37::1
-22::40::1
-22::41::3
-22::44::1
-22::45::2
-22::48::1
-22::51::5
-22::55::1
-22::56::2
-22::60::3
-22::61::1
-22::62::4
-22::63::1
-22::65::1
-22::66::1
-22::68::4
-22::69::4
-22::70::3
-22::71::1
-22::74::5
-22::75::5
-22::78::1
-22::80::3
-22::81::1
-22::82::1
-22::84::1
-22::86::1
-22::87::3
-22::88::5
-22::90::2
-22::92::3
-22::95::2
-22::96::2
-22::98::4
-22::99::1
-23::0::1
-23::2::1
-23::4::1
-23::6::2
-23::10::4
-23::12::1
-23::13::4
-23::14::1
-23::15::1
-23::18::4
-23::22::2
-23::23::4
-23::24::1
-23::25::1
-23::26::1
-23::27::5
-23::28::1
-23::29::1
-23::30::4
-23::32::5
-23::33::2
-23::36::3
-23::37::1
-23::38::1
-23::39::1
-23::43::1
-23::48::5
-23::49::5
-23::50::4
-23::53::1
-23::55::5
-23::57::1
-23::59::1
-23::60::1
-23::61::1
-23::64::4
-23::65::5
-23::66::2
-23::67::1
-23::68::3
-23::69::1
-23::72::1
-23::73::3
-23::77::1
-23::82::2
-23::83::1
-23::84::1
-23::85::1
-23::87::3
-23::88::1
-23::95::2
-23::97::1
-24::4::1
-24::6::3
-24::7::1
-24::10::2
-24::12::1
-24::15::1
-24::19::1
-24::24::1
-24::27::3
-24::30::5
-24::31::1
-24::32::3
-24::33::1
-24::37::1
-24::39::1
-24::40::1
-24::42::1
-24::43::3
-24::45::2
-24::46::1
-24::47::1
-24::48::1
-24::49::1
-24::50::1
-24::52::5
-24::57::1
-24::59::4
-24::63::4
-24::65::1
-24::66::1
-24::67::1
-24::68::3
-24::69::5
-24::71::1
-24::72::4
-24::77::4
-24::78::1
-24::80::1
-24::82::1
-24::84::1
-24::86::1
-24::87::1
-24::88::2
-24::89::1
-24::90::5
-24::91::1
-24::92::1
-24::94::2
-24::95::1
-24::96::5
-24::98::1
-24::99::1
-25::1::3
-25::2::1
-25::7::1
-25::9::1
-25::12::3
-25::16::3
-25::17::1
-25::18::1
-25::20::1
-25::22::1
-25::23::1
-25::26::2
-25::29::1
-25::30::1
-25::31::2
-25::33::4
-25::34::3
-25::35::2
-25::36::1
-25::37::1
-25::40::1
-25::41::1
-25::43::1
-25::47::4
-25::50::1
-25::51::1
-25::53::1
-25::56::1
-25::58::2
-25::64::2
-25::67::2
-25::68::1
-25::70::1
-25::71::4
-25::73::1
-25::74::1
-25::76::1
-25::79::1
-25::82::1
-25::84::2
-25::85::1
-25::91::3
-25::92::1
-25::94::1
-25::95::1
-25::97::2
-26::0::1
-26::1::1
-26::2::1
-26::3::1
-26::4::4
-26::5::2
-26::6::3
-26::7::5
-26::13::3
-26::14::1
-26::16::1
-26::18::3
-26::20::1
-26::21::3
-26::22::5
-26::23::5
-26::24::5
-26::27::1
-26::31::1
-26::35::1
-26::36::4
-26::40::1
-26::44::1
-26::45::2
-26::47::1
-26::48::1
-26::49::3
-26::50::2
-26::52::1
-26::54::4
-26::55::1
-26::57::3
-26::58::1
-26::61::1
-26::62::2
-26::66::1
-26::68::4
-26::71::1
-26::73::4
-26::76::1
-26::81::3
-26::85::1
-26::86::3
-26::88::5
-26::91::1
-26::94::5
-26::95::1
-26::96::1
-26::97::1
-27::0::1
-27::9::1
-27::10::1
-27::18::4
-27::19::3
-27::20::1
-27::22::2
-27::24::2
-27::25::1
-27::27::3
-27::28::1
-27::29::1
-27::31::1
-27::33::3
-27::40::1
-27::42::1
-27::43::1
-27::44::3
-27::45::1
-27::51::3
-27::52::1
-27::55::3
-27::57::1
-27::59::1
-27::60::1
-27::61::1
-27::64::1
-27::66::3
-27::68::1
-27::70::1
-27::71::2
-27::72::1
-27::75::3
-27::78::1
-27::80::3
-27::82::1
-27::83::3
-27::86::1
-27::87::2
-27::90::1
-27::91::1
-27::92::1
-27::93::1
-27::94::2
-27::95::1
-27::98::1
-28::0::3
-28::1::1
-28::2::4
-28::3::1
-28::6::1
-28::7::1
-28::12::5
-28::13::2
-28::14::1
-28::15::1
-28::17::1
-28::19::3
-28::20::1
-28::23::3
-28::24::3
-28::27::1
-28::29::1
-28::33::1
-28::34::1
-28::36::1
-28::38::2
-28::39::2
-28::44::1
-28::45::1
-28::49::4
-28::50::1
-28::52::1
-28::54::1
-28::56::1
-28::57::3
-28::58::1
-28::59::1
-28::60::1
-28::62::3
-28::63::1
-28::65::1
-28::75::1
-28::78::1
-28::81::5
-28::82::4
-28::83::1
-28::85::1
-28::88::2
-28::89::4
-28::90::1
-28::92::5
-28::94::1
-28::95::2
-28::98::1
-28::99::1
-29::3::1
-29::4::1
-29::5::1
-29::7::2
-29::9::1
-29::10::3
-29::11::1
-29::13::3
-29::14::1
-29::15::1
-29::17::3
-29::19::3
-29::22::3
-29::23::4
-29::25::1
-29::29::1
-29::31::1
-29::32::4
-29::33::2
-29::36::2
-29::38::3
-29::39::1
-29::42::1
-29::46::5
-29::49::3
-29::51::2
-29::59::1
-29::61::1
-29::62::1
-29::67::1
-29::68::3
-29::69::1
-29::70::1
-29::74::1
-29::75::1
-29::79::2
-29::80::1
-29::81::2
-29::83::1
-29::85::1
-29::86::1
-29::90::4
-29::93::1
-29::94::4
-29::97::1
-29::99::1
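
For reference, the deleted sample_movielens_data.txt above stores one rating per line in the MovieLens-style user::item::rating format. A minimal parsing sketch (illustrative only; the path and the list-of-tuples representation are assumptions, not part of this commit):

# Minimal sketch: parse "user::item::rating" lines from the sample data file.
# The path is an example; point it at wherever the file actually lives.
def parse_ratings(path="sample_movielens_data.txt"):
    ratings = []
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            user, item, rating = line.split("::")
            ratings.append((user, item, float(rating)))
    return ratings

# A line such as "16::55::1" becomes ("16", "55", 1.0).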

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/data/send_query.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/data/send_query.py b/testing/pio_tests/engines/recommendation-engine/data/send_query.py
deleted file mode 100644
index 7eaf53e..0000000
--- a/testing/pio_tests/engines/recommendation-engine/data/send_query.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""
-Send sample query to prediction engine
-"""
-
-import predictionio
-engine_client = predictionio.EngineClient(url="http://localhost:8000")
-print engine_client.send_query({"user": "1", "num": 4})
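
Note that the deleted send_query.py uses the Python 2 print statement. A Python 3 equivalent would look like the sketch below (assuming, as in the original script, that the predictionio SDK is installed and the engine is deployed at http://localhost:8000):

"""
Send a sample query to the prediction engine (Python 3 variant of the deleted script).
"""
import predictionio

engine_client = predictionio.EngineClient(url="http://localhost:8000")
# Request 4 recommendations for user "1", mirroring the original query.
print(engine_client.send_query({"user": "1", "num": 4}))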

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/engine.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/engine.json b/testing/pio_tests/engines/recommendation-engine/engine.json
deleted file mode 100644
index 8d53d56..0000000
--- a/testing/pio_tests/engines/recommendation-engine/engine.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "id": "default",
-  "description": "Default settings",
-  "engineFactory": "org.template.recommendation.RecommendationEngine",
-  "datasource": {
-    "params" : {
-      "appName": "MyApp1"
-    }
-  },
-  "algorithms": [
-    {
-      "name": "als",
-      "params": {
-        "rank": 10,
-        "numIterations": 10,
-        "lambda": 0.01,
-        "seed": 3
-      }
-    }
-  ]
-}
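
The engine.json above binds the "als" algorithm to its tuning parameters (rank, numIterations, lambda, seed). If a test needed to inspect those values programmatically, a helper along these lines would do (a hedged sketch; the function name and default path are illustrative, not part of the test suite):

import json

def load_als_params(engine_json_path="engine.json"):
    # Return the parameter dict of the "als" algorithm from engine.json.
    with open(engine_json_path) as f:
        config = json.load(f)
    for algo in config.get("algorithms", []):
        if algo.get("name") == "als":
            return algo["params"]
    return None

# With the engine.json above this yields:
# {"rank": 10, "numIterations": 10, "lambda": 0.01, "seed": 3}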

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/manifest.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/manifest.json b/testing/pio_tests/engines/recommendation-engine/manifest.json
deleted file mode 100644
index 9c545ce..0000000
--- a/testing/pio_tests/engines/recommendation-engine/manifest.json
+++ /dev/null
@@ -1 +0,0 @@
-{"id":"yDBpzjz39AjIxlOAh8W4t3QSc75uPCuJ","version":"ee98ff9c009ef0d9fb1bc6b78750b83a0ceb37b2","name":"recommendation-engine","description":"pio-autogen-manifest","files":[],"engineFactory":""}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt b/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt
deleted file mode 100644
index 54c3252..0000000
--- a/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt
+++ /dev/null
@@ -1 +0,0 @@
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt b/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt
deleted file mode 100644
index 8346a96..0000000
--- a/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt
+++ /dev/null
@@ -1 +0,0 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala
deleted file mode 100644
index 17c2806..0000000
--- a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-package org.template.recommendation
-
-import org.apache.predictionio.controller.PAlgorithm
-import org.apache.predictionio.controller.Params
-import org.apache.predictionio.data.storage.BiMap
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-import org.apache.spark.mllib.recommendation.ALS
-import org.apache.spark.mllib.recommendation.{Rating => MLlibRating}
-import org.apache.spark.mllib.recommendation.ALSModel
-
-import grizzled.slf4j.Logger
-
-case class ALSAlgorithmParams(
-  rank: Int,
-  numIterations: Int,
-  lambda: Double,
-  seed: Option[Long]) extends Params
-
-class ALSAlgorithm(val ap: ALSAlgorithmParams)
-  extends PAlgorithm[PreparedData, ALSModel, Query, PredictedResult] {
-
-  @transient lazy val logger = Logger[this.type]
-
-  if (ap.numIterations > 30) {
-    logger.warn(
-      s"ALSAlgorithmParams.numIterations > 30, current: ${ap.numIterations}. " +
-      s"There is a chance of running to StackOverflowException. Lower this number to remedy it")
-  }
-
-  def train(sc: SparkContext, data: PreparedData): ALSModel = {
-    // MLLib ALS cannot handle empty training data.
-    require(!data.ratings.take(1).isEmpty,
-      s"RDD[Rating] in PreparedData cannot be empty." +
-      " Please check if DataSource generates TrainingData" +
-      " and Preprator generates PreparedData correctly.")
-    // Convert user and item String IDs to Int index for MLlib
-
-    val userStringIntMap = BiMap.stringInt(data.ratings.map(_.user))
-    val itemStringIntMap = BiMap.stringInt(data.ratings.map(_.item))
-    val mllibRatings = data.ratings.map( r =>
-      // MLlibRating requires integer index for user and item
-      MLlibRating(userStringIntMap(r.user), itemStringIntMap(r.item), r.rating)
-    )
-
-    // seed for MLlib ALS
-    val seed = ap.seed.getOrElse(System.nanoTime)
-
-    // If you only have one type of implicit event (e.g. "view" events only),
-    // replace ALS.train(...) with
-    //val m = ALS.trainImplicit(
-      //ratings = mllibRatings,
-      //rank = ap.rank,
-      //iterations = ap.numIterations,
-      //lambda = ap.lambda,
-      //blocks = -1,
-      //alpha = 1.0,
-      //seed = seed)
-
-    val m = ALS.train(
-      ratings = mllibRatings,
-      rank = ap.rank,
-      iterations = ap.numIterations,
-      lambda = ap.lambda,
-      blocks = -1,
-      seed = seed)
-
-    new ALSModel(
-      rank = m.rank,
-      userFeatures = m.userFeatures,
-      productFeatures = m.productFeatures,
-      userStringIntMap = userStringIntMap,
-      itemStringIntMap = itemStringIntMap)
-  }
-
-  def predict(model: ALSModel, query: Query): PredictedResult = {
-    // Convert String ID to Int index for MLlib
-    model.userStringIntMap.get(query.user).map { userInt =>
-      // create inverse view of itemStringIntMap
-      val itemIntStringMap = model.itemStringIntMap.inverse
-      // recommendProducts() returns Array[MLlibRating], which uses item Int
-      // index. Convert it to String ID for returning PredictedResult
-      val itemScores = model.recommendProducts(userInt, query.num)
-        .map (r => ItemScore(itemIntStringMap(r.product), r.rating))
-      new PredictedResult(itemScores)
-    }.getOrElse{
-      logger.info(s"No prediction for unknown user ${query.user}.")
-      new PredictedResult(Array.empty)
-    }
-  }
-
-  // This function is used by the evaluation module, where a batch of queries is sent to this engine
-  // for evaluation purposes.
-  override def batchPredict(model: ALSModel, queries: RDD[(Long, Query)]): RDD[(Long, PredictedResult)] = {
-    val userIxQueries: RDD[(Int, (Long, Query))] = queries
-    .map { case (ix, query) => {
-      // If user not found, then the index is -1
-      val userIx = model.userStringIntMap.get(query.user).getOrElse(-1)
-      (userIx, (ix, query))
-    }}
-
-    // Cross product of all valid users from the queries and products in the model.
-    val usersProducts: RDD[(Int, Int)] = userIxQueries
-      .keys
-      .filter(_ != -1)
-      .cartesian(model.productFeatures.map(_._1))
-
-    // Call mllib ALS's predict function.
-    val ratings: RDD[MLlibRating] = model.predict(usersProducts)
-
-    // The following code constructs predicted results from MLlib's ratings.
-    // This is not an optimal implementation; instead of groupBy, combineByKey with a PriorityQueue should be used.
-    val userRatings: RDD[(Int, Iterable[MLlibRating])] = ratings.groupBy(_.user)
-
-    userIxQueries.leftOuterJoin(userRatings)
-    .map {
-      // When there are ratings
-      case (userIx, ((ix, query), Some(ratings))) => {
-        val topItemScores: Array[ItemScore] = ratings
-        .toArray
-        .sortBy(_.rating)(Ordering.Double.reverse) // note: from large to small ordering
-        .take(query.num)
-        .map { rating => ItemScore(
-          model.itemStringIntMap.inverse(rating.product),
-          rating.rating) }
-
-        (ix, PredictedResult(itemScores = topItemScores))
-      }
-      // When user doesn't exist in training data
-      case (userIx, ((ix, query), None)) => {
-        require(userIx == -1)
-        (ix, PredictedResult(itemScores = Array.empty))
-      }
-    }
-  }
-}
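
ALSAlgorithm.scala above relies on BiMap.stringInt to give every user and item string ID a dense integer index before calling MLlib, and on the inverse map to translate results back. The idea, stripped of Spark, can be sketched in a few lines of plain Python (illustrative only; the real BiMap is a Scala class in the PredictionIO storage layer):

def string_int_map(ids):
    # Assign a dense integer index to each distinct string ID and
    # build the inverse mapping, mimicking BiMap.stringInt / .inverse.
    forward = {}
    for value in ids:
        if value not in forward:
            forward[value] = len(forward)
    inverse = {index: value for value, index in forward.items()}
    return forward, inverse

forward, inverse = string_int_map(["u3", "u1", "u3", "u2"])
# forward == {"u3": 0, "u1": 1, "u2": 2} and inverse[1] == "u1"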

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala
deleted file mode 100644
index 243c1d1..0000000
--- a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package org.apache.spark.mllib.recommendation
-// This must be the same package as Spark's MatrixFactorizationModel because
-// MatrixFactorizationModel's constructor is private and we are using
-// its constructor in order to save and load the model
-
-import org.template.recommendation.ALSAlgorithmParams
-
-import org.apache.predictionio.controller.IPersistentModel
-import org.apache.predictionio.controller.IPersistentModelLoader
-import org.apache.predictionio.data.storage.BiMap
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-class ALSModel(
-    override val rank: Int,
-    override val userFeatures: RDD[(Int, Array[Double])],
-    override val productFeatures: RDD[(Int, Array[Double])],
-    val userStringIntMap: BiMap[String, Int],
-    val itemStringIntMap: BiMap[String, Int])
-  extends MatrixFactorizationModel(rank, userFeatures, productFeatures)
-  with IPersistentModel[ALSAlgorithmParams] {
-
-  def save(id: String, params: ALSAlgorithmParams,
-    sc: SparkContext): Boolean = {
-
-    sc.parallelize(Seq(rank)).saveAsObjectFile(s"/tmp/${id}/rank")
-    userFeatures.saveAsObjectFile(s"/tmp/${id}/userFeatures")
-    productFeatures.saveAsObjectFile(s"/tmp/${id}/productFeatures")
-    sc.parallelize(Seq(userStringIntMap))
-      .saveAsObjectFile(s"/tmp/${id}/userStringIntMap")
-    sc.parallelize(Seq(itemStringIntMap))
-      .saveAsObjectFile(s"/tmp/${id}/itemStringIntMap")
-    true
-  }
-
-  override def toString = {
-    s"userFeatures: [${userFeatures.count()}]" +
-    s"(${userFeatures.take(2).toList}...)" +
-    s" productFeatures: [${productFeatures.count()}]" +
-    s"(${productFeatures.take(2).toList}...)" +
-    s" userStringIntMap: [${userStringIntMap.size}]" +
-    s"(${userStringIntMap.take(2)}...)" +
-    s" itemStringIntMap: [${itemStringIntMap.size}]" +
-    s"(${itemStringIntMap.take(2)}...)"
-  }
-}
-
-object ALSModel
-  extends IPersistentModelLoader[ALSAlgorithmParams, ALSModel] {
-  def apply(id: String, params: ALSAlgorithmParams,
-    sc: Option[SparkContext]) = {
-    new ALSModel(
-      rank = sc.get.objectFile[Int](s"/tmp/${id}/rank").first,
-      userFeatures = sc.get.objectFile(s"/tmp/${id}/userFeatures"),
-      productFeatures = sc.get.objectFile(s"/tmp/${id}/productFeatures"),
-      userStringIntMap = sc.get
-        .objectFile[BiMap[String, Int]](s"/tmp/${id}/userStringIntMap").first,
-      itemStringIntMap = sc.get
-        .objectFile[BiMap[String, Int]](s"/tmp/${id}/itemStringIntMap").first)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala
deleted file mode 100644
index eea3ae6..0000000
--- a/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-package org.template.recommendation
-
-import org.apache.predictionio.controller.PDataSource
-import org.apache.predictionio.controller.EmptyEvaluationInfo
-import org.apache.predictionio.controller.EmptyActualResult
-import org.apache.predictionio.controller.Params
-import org.apache.predictionio.data.storage.Event
-import org.apache.predictionio.data.store.PEventStore
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-import grizzled.slf4j.Logger
-
-case class DataSourceEvalParams(kFold: Int, queryNum: Int)
-
-case class DataSourceParams(
-  appName: String,
-  evalParams: Option[DataSourceEvalParams]) extends Params
-
-class DataSource(val dsp: DataSourceParams)
-  extends PDataSource[TrainingData,
-      EmptyEvaluationInfo, Query, ActualResult] {
-
-  @transient lazy val logger = Logger[this.type]
-
-  def getRatings(sc: SparkContext): RDD[Rating] = {
-
-    val eventsRDD: RDD[Event] = PEventStore.find(
-      appName = dsp.appName,
-      entityType = Some("user"),
-      eventNames = Some(List("rate", "buy")), // read "rate" and "buy" events
-      // targetEntityType is an optional field of an event.
-      targetEntityType = Some(Some("item")))(sc)
-
-    val ratingsRDD: RDD[Rating] = eventsRDD.map { event =>
-      val rating = try {
-        val ratingValue: Double = event.event match {
-          case "rate" => event.properties.get[Double]("rating")
-          case "buy" => 4.0 // map buy event to rating value of 4
-          case _ => throw new Exception(s"Unexpected event ${event} is read.")
-        }
-        // entityId and targetEntityId are Strings
-        Rating(event.entityId,
-          event.targetEntityId.get,
-          ratingValue)
-      } catch {
-        case e: Exception => {
-          logger.error(s"Cannot convert ${event} to Rating. Exception: ${e}.")
-          throw e
-        }
-      }
-      rating
-    }.cache()
-
-    ratingsRDD
-  }
-
-  override
-  def readTraining(sc: SparkContext): TrainingData = {
-    new TrainingData(getRatings(sc))
-  }
-
-  override
-  def readEval(sc: SparkContext)
-  : Seq[(TrainingData, EmptyEvaluationInfo, RDD[(Query, ActualResult)])] = {
-    require(!dsp.evalParams.isEmpty, "Must specify evalParams")
-    val evalParams = dsp.evalParams.get
-
-    val kFold = evalParams.kFold
-    val ratings: RDD[(Rating, Long)] = getRatings(sc).zipWithUniqueId
-    ratings.cache
-
-    (0 until kFold).map { idx => {
-      val trainingRatings = ratings.filter(_._2 % kFold != idx).map(_._1)
-      val testingRatings = ratings.filter(_._2 % kFold == idx).map(_._1)
-
-      val testingUsers: RDD[(String, Iterable[Rating])] = testingRatings.groupBy(_.user)
-
-      (new TrainingData(trainingRatings),
-        new EmptyEvaluationInfo(),
-        testingUsers.map {
-          case (user, ratings) => (Query(user, evalParams.queryNum), ActualResult(ratings.toArray))
-        }
-      )
-    }}
-  }
-}
-
-case class Rating(
-  user: String,
-  item: String,
-  rating: Double
-)
-
-class TrainingData(
-  val ratings: RDD[Rating]
-) extends Serializable {
-  override def toString = {
-    s"ratings: [${ratings.count()}] (${ratings.take(2).toList}...)"
-  }
-}
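
readEval above pairs every rating with a unique id and assigns it to a fold via id % kFold, so each fold's test set is disjoint from its training set. The same partitioning logic, shown on plain Python lists rather than Spark RDDs (a sketch, not the actual implementation):

def k_fold_split(ratings, k_fold):
    # Index each rating, then use index % k_fold to decide which fold
    # it is held out in, as DataSource.readEval does with zipWithUniqueId.
    indexed = list(enumerate(ratings))
    folds = []
    for idx in range(k_fold):
        training = [r for i, r in indexed if i % k_fold != idx]
        testing = [r for i, r in indexed if i % k_fold == idx]
        folds.append((training, testing))
    return folds

# k_fold_split(["a", "b", "c", "d", "e"], 2)[0] == (["b", "d"], ["a", "c", "e"])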

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala
deleted file mode 100644
index 79840dc..0000000
--- a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package org.template.recommendation
-
-import org.apache.predictionio.controller.IEngineFactory
-import org.apache.predictionio.controller.Engine
-
-case class Query(
-  user: String,
-  num: Int
-) extends Serializable
-
-case class PredictedResult(
-  itemScores: Array[ItemScore]
-) extends Serializable
-
-case class ActualResult(
-  ratings: Array[Rating]
-) extends Serializable
-
-case class ItemScore(
-  item: String,
-  score: Double
-) extends Serializable
-
-object RecommendationEngine extends IEngineFactory {
-  def apply() = {
-    new Engine(
-      classOf[DataSource],
-      classOf[Preparator],
-      Map("als" -> classOf[ALSAlgorithm]),
-      classOf[Serving])
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala
deleted file mode 100644
index 34e5689..0000000
--- a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-package org.template.recommendation
-
-import org.apache.predictionio.controller.Evaluation
-import org.apache.predictionio.controller.OptionAverageMetric
-import org.apache.predictionio.controller.AverageMetric
-import org.apache.predictionio.controller.EmptyEvaluationInfo
-import org.apache.predictionio.controller.EngineParamsGenerator
-import org.apache.predictionio.controller.EngineParams
-import org.apache.predictionio.controller.MetricEvaluator
-
-// Usage:
-// $ pio eval org.template.recommendation.RecommendationEvaluation \
-//   org.template.recommendation.EngineParamsList
-
-case class PrecisionAtK(k: Int, ratingThreshold: Double = 2.0)
-    extends OptionAverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
-  require(k > 0, "k must be greater than 0")
-
-  override def header = s"Precision@K (k=$k, threshold=$ratingThreshold)"
-
-  def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
-    val positives: Set[String] = a.ratings.filter(_.rating >= ratingThreshold).map(_.item).toSet
-
-    // If there are no positive results, precision is undefined. We don't consider this case in the
-    // metrics, hence we return None.
-    if (positives.size == 0) {
-      return None
-    }
-
-    val tpCount: Int = p.itemScores.take(k).filter(is => positives(is.item)).size
-
-    Some(tpCount.toDouble / math.min(k, positives.size))
-  }
-}
-
-case class PositiveCount(ratingThreshold: Double = 2.0)
-    extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
-  override def header = s"PositiveCount (threshold=$ratingThreshold)"
-
-  def calculate(q: Query, p: PredictedResult, a: ActualResult): Double = {
-    a.ratings.filter(_.rating >= ratingThreshold).size
-  }
-}
-
-object RecommendationEvaluation extends Evaluation {
-  engineEvaluator = (
-    RecommendationEngine(),
-    MetricEvaluator(
-      metric = PrecisionAtK(k = 10, ratingThreshold = 4.0),
-      otherMetrics = Seq(
-        PositiveCount(ratingThreshold = 4.0),
-        PrecisionAtK(k = 10, ratingThreshold = 2.0),
-        PositiveCount(ratingThreshold = 2.0),
-        PrecisionAtK(k = 10, ratingThreshold = 1.0),
-        PositiveCount(ratingThreshold = 1.0)
-      )))
-}
-
-
-object ComprehensiveRecommendationEvaluation extends Evaluation {
-  val ratingThresholds = Seq(0.0, 2.0, 4.0)
-  val ks = Seq(1, 3, 10)
-
-  engineEvaluator = (
-    RecommendationEngine(),
-    MetricEvaluator(
-      metric = PrecisionAtK(k = 3, ratingThreshold = 2.0),
-      otherMetrics = (
-        (for (r <- ratingThresholds) yield PositiveCount(ratingThreshold = r)) ++
-        (for (r <- ratingThresholds; k <- ks) yield PrecisionAtK(k = k, ratingThreshold = r))
-      )))
-}
-
-
-trait BaseEngineParamsList extends EngineParamsGenerator {
-  protected val baseEP = EngineParams(
-    dataSourceParams = DataSourceParams(
-      appName = "INVALID_APP_NAME",
-      evalParams = Some(DataSourceEvalParams(kFold = 5, queryNum = 10))))
-}
-
-object EngineParamsList extends BaseEngineParamsList {
-  engineParamsList = for(
-    rank <- Seq(5, 10, 20);
-    numIterations <- Seq(1, 5, 10))
-    yield baseEP.copy(
-      algorithmParamsList = Seq(
-        ("als", ALSAlgorithmParams(rank, numIterations, 0.01, Some(3)))))
-}
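
PrecisionAtK above counts how many of the top-k predicted items are "positive" (actually rated at or above the threshold) and divides by min(k, number of positives), returning None when there are no positives. A small worked example of that definition in Python (item names are made up for illustration):

def precision_at_k(predicted_items, actual_ratings, k, threshold=2.0):
    # actual_ratings is a list of (item, rating) pairs from the test fold.
    positives = {item for item, rating in actual_ratings if rating >= threshold}
    if not positives:
        return None  # precision is undefined without positive items
    tp_count = sum(1 for item in predicted_items[:k] if item in positives)
    return tp_count / min(k, len(positives))

predicted = ["m1", "m7", "m3"]
actual = [("m1", 5.0), ("m3", 4.0), ("m9", 1.0)]
# positives = {m1, m3}; hits in the top 3 = 2; denominator = min(3, 2) = 2
print(precision_at_k(predicted, actual, k=3))  # 1.0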

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala
deleted file mode 100644
index 8f2f7e4..0000000
--- a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.template.recommendation
-
-import org.apache.predictionio.controller.PPreparator
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-class Preparator
-  extends PPreparator[TrainingData, PreparedData] {
-
-  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
-    new PreparedData(ratings = trainingData.ratings)
-  }
-}
-
-class PreparedData(
-  val ratings: RDD[Rating]
-) extends Serializable

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala
deleted file mode 100644
index 38ba8b9..0000000
--- a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package org.template.recommendation
-
-import org.apache.predictionio.controller.LServing
-
-class Serving
-  extends LServing[Query, PredictedResult] {
-
-  override
-  def serve(query: Query,
-    predictedResults: Seq[PredictedResult]): PredictedResult = {
-    predictedResults.head
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/engines/recommendation-engine/template.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/template.json b/testing/pio_tests/engines/recommendation-engine/template.json
deleted file mode 100644
index fb4a50b..0000000
--- a/testing/pio_tests/engines/recommendation-engine/template.json
+++ /dev/null
@@ -1 +0,0 @@
-{"pio": {"version": { "min": "0.9.2" }}}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/globals.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/globals.py b/testing/pio_tests/globals.py
deleted file mode 100644
index 1134501..0000000
--- a/testing/pio_tests/globals.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import subprocess
-
-SUPPRESS_STDOUT=False
-SUPPRESS_STDERR=False
-LOGGER_NAME='INT_TESTS'
-
-def std_out():
-  if SUPPRESS_STDOUT:
-    return subprocess.DEVNULL
-  else:
-    return None
-
-def std_err():
-  if SUPPRESS_STDERR:
-    return subprocess.DEVNULL
-  else:
-    return None
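
The std_out() and std_err() helpers above return subprocess.DEVNULL when output suppression is enabled and None otherwise, so they can be passed straight to subprocess calls. A hedged usage sketch (the command shown is only an example):

import subprocess
import pio_tests.globals as globals

# Silence or forward the child process output depending on the global flags.
subprocess.call(
    ["pio", "status"],  # example command only
    stdout=globals.std_out(),
    stderr=globals.std_err())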

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/integration.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/integration.py b/testing/pio_tests/integration.py
deleted file mode 100644
index 441365e..0000000
--- a/testing/pio_tests/integration.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import unittest
-import logging
-import pio_tests.globals as globals
-
-class TestContext:
-  """Class representing the settings provided for every test"""
-
-  def __init__(self, engine_directory, data_directory, es_ip='0.0.0.0', es_port=7070):
-    """
-    Args:
-      engine_directory (str): path to the directory where the engines are stored
-      data_directory (str):   path to the directory where tests can keep their data
-      es_ip (str):            ip of the eventserver
-      es_port (int):          port of the eventserver
-    """
-    self.engine_directory = engine_directory
-    self.data_directory = data_directory
-    self.es_ip = es_ip
-    self.es_port = es_port
-
-class BaseTestCase(unittest.TestCase):
-  """This is the base class for all integration tests
-
-  This class sets up a `TestContext` object and a logger for every test case
-  """
-  def __init__(self, test_context, methodName='runTest'):
-    super(BaseTestCase, self).__init__(methodName)
-    self.test_context = test_context
-    self.log = logging.getLogger(globals.LOGGER_NAME)
-
-class AppContext:
-  """ This class is a description of an instance of the engine"""
-
-  def __init__(self, name, template, engine_json_path=None):
-    """
-    Args:
-      name (str): application name
-      template (str): either the name of an engine from the engines directory
-          or a link to repository with the engine
-      engine_json_path (str): path to json file describing an engine (a custom engine.json)
-          to be used for the application. If `None`, engine.json from the engine's directory
-          will be used
-    """
-    self.name = name
-    self.template = template
-    self.engine_json_path = engine_json_path
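
For orientation, a scenario built on these classes would subclass BaseTestCase, receive a TestContext, and describe its application with an AppContext, roughly as sketched below (paths, the app name, and the test body are placeholders, not taken from the real test suite):

import unittest
from pio_tests.integration import TestContext, BaseTestCase, AppContext

class ExampleTestCase(BaseTestCase):
    def runTest(self):
        app = AppContext(
            name="ExampleApp",                  # placeholder application name
            template="recommendation-engine")   # engine from the engines directory
        self.log.info("eventserver at %s:%s",
                      self.test_context.es_ip, self.test_context.es_port)
        self.assertIsNotNone(app.name)

if __name__ == "__main__":
    context = TestContext("/path/to/engines", "/path/to/data")  # placeholder paths
    suite = unittest.TestSuite([ExampleTestCase(context)])
    unittest.TextTestRunner().run(suite)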

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/scenarios/__init__.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/scenarios/__init__.py b/testing/pio_tests/scenarios/__init__.py
deleted file mode 100644
index e69de29..0000000

