predictionio-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From don...@apache.org
Subject [49/52] [abbrv] incubator-predictionio git commit: Renamed directory testing to tests
Date Tue, 09 Aug 2016 21:43:57 GMT
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/scenarios/basic_app_usecases.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/scenarios/basic_app_usecases.py b/testing/pio_tests/scenarios/basic_app_usecases.py
deleted file mode 100644
index d8b3a1e..0000000
--- a/testing/pio_tests/scenarios/basic_app_usecases.py
+++ /dev/null
@@ -1,154 +0,0 @@
-import os
-import unittest
-import random
-import logging
-import time
-from subprocess import CalledProcessError
-from pio_tests.integration import BaseTestCase, AppContext
-from utils import *
-
-ITEMS_COUNT = 12
-
-def get_buy_events(users, per_user=2):
-  events = []
-  for u in range(users):
-    items = set([random.randint(0, ITEMS_COUNT) for i in range(per_user)])
-    for item in items:
-      events.append({
-        "event": "buy",
-        "entityType": "user",
-        "entityId": u,
-        "targetEntityType": "item",
-        "targetEntityId": item })
-
-  return events
-
-def get_rate_events(users, per_user=2):
-  events = []
-  for u in range(users):
-    items = set([random.randint(0, ITEMS_COUNT) for i in range(per_user)])
-    for item in items:
-      events.append( {
-        "event": "rate",
-        "entityType": "user",
-        "entityId": u,
-        "targetEntityType": "item",
-        "targetEntityId": item,
-        "properties": { "rating" : float(random.randint(1,5)) } })
-
-  return events
-
-
-class BasicAppUsecases(BaseTestCase):
-
-  def setUp(self):
-    random.seed(3)
-    self.log.info("Setting up the engine")
-
-    template_path = pjoin(
-        self.test_context.engine_directory, "recommendation-engine")
-    engine_json_path = pjoin(
-        self.test_context.data_directory, "quickstart_test/engine.json")
-
-    app_context = AppContext(
-        name="MyRecommender",
-        template=template_path,
-        engine_json_path=engine_json_path)
-
-    self.app = AppEngine(self.test_context, app_context)
-
-  def runTest(self):
-    self.app_creation()
-    self.check_app_list()
-    self.check_data()
-    self.check_build()
-    self.check_train_and_deploy()
-
-  def app_creation(self):
-    self.log.info("Adding a new application")
-    description = "SomeDescription"
-    self.app.new(description=description)
-    self.assertEqual(description, self.app.description)
-
-    self.log.info("Creating an app again - should fail")
-    self.assertRaises(CalledProcessError, lambda : self.app.new())
-
-  def check_app_list(self):
-    self.log.info("Checking if app is on the list")
-    apps = pio_app_list()
-    self.assertEqual(1,
-        len([a for a in apps if a['name'] == self.app.app_context.name]))
-
-  def check_data(self):
-    self.log.info("Importing events")
-    buy_events = get_buy_events(20, 1)
-    rate_events = get_rate_events(20, 1)
-
-    for ev in buy_events + rate_events:
-      self.assertEquals(201, self.app.send_event(ev).status_code)
-
-    self.log.info("Checking imported events")
-    r = self.app.get_events(params={'limit': -1})
-    self.assertEqual(200, r.status_code)
-    self.assertEqual(len(buy_events) + len(rate_events), len(r.json()))
-
-    self.log.info("Deleting entire data")
-    self.app.delete_data()
-    self.log.info("Checking if there are no events at all")
-    r = self.app.get_events(params={'limit': -1})
-    self.assertEqual(404, r.status_code)
-
-  def check_build(self):
-    self.log.info("Clean build")
-    self.app.build(clean=True)
-    self.log.info("Second build")
-    self.app.build()
-
-  def check_train_and_deploy(self):
-    self.log.info("import some data first")
-    buy_events = get_buy_events(20, 5)
-    rate_events = get_rate_events(20, 5)
-    for ev in buy_events + rate_events:
-      self.assertEquals(201, self.app.send_event(ev).status_code)
-
-    self.log.info("Training")
-    self.app.train()
-    self.log.info("Deploying")
-    self.app.deploy()
-    self.assertFalse(self.app.deployed_process.poll())
-
-    self.log.info("Importing more events")
-    buy_events = get_buy_events(60, 5)
-    rate_events = get_rate_events(60, 5)
-    for ev in buy_events + rate_events:
-      self.assertEquals(201, self.app.send_event(ev).status_code)
-
-    self.log.info("Training again")
-    self.app.train()
-
-    time.sleep(7)
-
-    self.log.info("Check serving")
-    r = self.app.query({"user": 1, "num": 5})
-    self.assertEqual(200, r.status_code)
-    result = r.json()
-    self.assertEqual(5, len(result['itemScores']))
-    r = self.app.query({"user": 5, "num": 3})
-    self.assertEqual(200, r.status_code)
-    result = r.json()
-    self.assertEqual(3, len(result['itemScores']))
-
-    self.log.info("Remove data")
-    self.app.delete_data()
-    self.log.info("Retraining should fail")
-    self.assertRaises(CalledProcessError, lambda: self.app.train())
-
-
-  def tearDown(self):
-    self.log.info("Stopping deployed engine")
-    self.app.stop()
-    self.log.info("Deleting all related data")
-    self.app.delete_data()
-    self.log.info("Removing an app")
-    self.app.delete()
-

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/scenarios/eventserver_test.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/scenarios/eventserver_test.py b/testing/pio_tests/scenarios/eventserver_test.py
deleted file mode 100644
index 8c243d2..0000000
--- a/testing/pio_tests/scenarios/eventserver_test.py
+++ /dev/null
@@ -1,155 +0,0 @@
-import unittest
-import requests
-import json
-import argparse
-from subprocess import Popen
-from utils import AppEngine, pjoin
-from pio_tests.integration import BaseTestCase, AppContext
-
-class EventserverTest(BaseTestCase):
-  """ Integration test for PredictionIO Eventserver API
-  Refer to below for further information:
-    http://docs.prediction.io/datacollection/eventmodel/
-    http://docs.prediction.io/datacollection/eventapi/
-  """
-  # Helper methods
-  def eventserver_url(self, path=None):
-    url = 'http://{}:{}'.format(
-            self.test_context.es_ip, self.test_context.es_port)
-    if path: url += '/{}'.format(path)
-    return url
-
-  def load_events(self, json_file):
-    file_path = pjoin(self.test_context.data_directory,
-        'eventserver_test/{}'.format(json_file))
-    return json.loads(open(file_path).read())
-
-
-  def setUp(self):
-    template_path = pjoin(
-        self.test_context.engine_directory, "recommendation-engine")
-    app_context = AppContext(
-        name="MyRecommender",
-        template=template_path)
-    self.app = AppEngine(self.test_context, app_context)
-
-  def runTest(self):
-    self.log.info("Check if Eventserver is alive and running")
-    r = requests.get(self.eventserver_url())
-    self.assertDictEqual(r.json(), {"status": "alive"})
-
-    self.log.info("Cannot view events with empty accessKey")
-    r = requests.get(self.eventserver_url(path='events.json'))
-    self.assertDictEqual(r.json(), {"message": "Missing accessKey."})
-
-    self.log.info("Cannot view events with invalid accessKey")
-    r = requests.get(self.eventserver_url(path='events.json'),
-        params={'accessKey': ''})
-    self.assertDictEqual(r.json(), {"message": "Invalid accessKey."})
-
-    self.log.info("Adding new pio application")
-    self.app.new()
-
-    self.log.info("No events have been sent yet")
-    r = self.app.get_events()
-    self.assertDictEqual(r.json(), {"message": "Not Found"})
-
-    # Testing POST
-    self.log.info("Sending single event")
-    event1 = {
-      'event' : 'test',
-      'entityType' : 'test',
-      'entityId' : 't1'
-    }
-    r = self.app.send_event(event1)
-    self.assertEqual(201, r.status_code)
-
-    self.log.info("Sending batch of events")
-    r = self.app.send_events_batch(
-        self.load_events("rate_events_25.json"))
-    self.assertEqual(200, r.status_code)
-
-    self.log.info("Cannot send more than 50 events per batch")
-    r = self.app.send_events_batch(
-        self.load_events("signup_events_51.json"))
-    self.assertEqual(400, r.status_code)
-
-    self.log.info("Importing events from file does not have batch size limit")
-    self.app.import_events_batch(
-        self.load_events("signup_events_51.json"))
-
-    self.log.info("Individual events may fail when sending events as batch")
-    r = self.app.send_events_batch(
-        self.load_events("partially_malformed_events.json"))
-    self.assertEqual(200, r.status_code)
-    self.assertEqual(201, r.json()[0]['status'])
-    self.assertEqual(400, r.json()[1]['status'])
-
-    # Testing GET for different parameters
-    params = {'event': 'rate'}
-    r = self.app.get_events(params=params)
-    self.assertEqual(20, len(r.json()))
-    self.assertEqual('rate', r.json()[0]['event'])
-
-    params = {
-      'event': 'rate',
-      'limit': -1 }
-    r = self.app.get_events(params=params)
-    self.assertEqual(25, len(r.json()))
-    self.assertEqual('rate', r.json()[0]['event'])
-
-    params = {
-      'event': 'rate',
-      'limit': 10 }
-    r = self.app.get_events(params=params)
-    self.assertEqual(10, len(r.json()))
-    self.assertEqual('rate', r.json()[0]['event'])
-
-    params = {
-      'event': 'rate',
-      'entityType': 'user',
-      'entityId': '1' }
-    r = self.app.get_events(params=params)
-    self.assertEqual(5, len(r.json()))
-    self.assertEqual('1', r.json()[0]['entityId'])
-
-    params = {
-      'event': 'rate',
-      'targetEntityType': 'item',
-      'targetEntityId': '1' }
-    r = self.app.get_events(params=params)
-    self.assertEqual(5, len(r.json()))
-    self.assertEqual('1', r.json()[0]['targetEntityId'])
-
-    params = {
-      'event': 'rate',
-      'entityType': 'user',
-      'entityId': '1',
-      'startTime': '2014-11-01T09:39:45.618-08:00',
-      'untilTime': '2014-11-04T09:39:45.618-08:00' }
-    r = self.app.get_events(params=params)
-    self.assertEqual(3, len(r.json()))
-    self.assertEqual('1', r.json()[0]['entityId'])
-
-    params = {
-      'event': 'rate',
-      'entityType': 'user',
-      'entityId': '1',
-      'reversed': 'true' }
-    r = self.app.get_events(params=params)
-    self.assertEqual(5, len(r.json()))
-    self.assertEqual('2014-11-05T09:39:45.618-08:00', r.json()[0]['eventTime'])
-
-  def tearDown(self):
-    self.log.info("Deleting all app data")
-    self.app.delete_data()
-    self.log.info("Deleting app")
-    self.app.delete()
-
-
-if __name__ == '__main__':
-  suite = unittest.TestSuite([BasicEventserverTest])
-  result = unittest.TextTestRunner(verbosity=2).run(suite)
-  if not result.wasSuccessful():
-    sys.exit(1)
-

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/scenarios/quickstart_test.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/scenarios/quickstart_test.py b/testing/pio_tests/scenarios/quickstart_test.py
deleted file mode 100644
index a083c2b..0000000
--- a/testing/pio_tests/scenarios/quickstart_test.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import os
-import unittest
-import random
-import logging
-from pio_tests.integration import BaseTestCase, AppContext
-from utils import AppEngine, srun, pjoin
-
-def read_events(file_path):
-  RATE_ACTIONS_DELIMITER = "::"
-  with open(file_path, 'r') as f:
-    events = []
-    for line in f:
-      data = line.rstrip('\r\n').split(RATE_ACTIONS_DELIMITER)
-      if random.randint(0, 1) == 1:
-        events.append( {
-          "event": "rate",
-          "entityType": "user",
-          "entityId": data[0],
-          "targetEntityType": "item",
-          "targetEntityId": data[1],
-          "properties": { "rating" : float(data[2]) } })
-      else:
-        events.append({
-          "event": "buy",
-          "entityType": "user",
-          "entityId": data[0],
-          "targetEntityType": "item",
-          "targetEntityId": data[1] })
-
-    return events
-
-
-class QuickStartTest(BaseTestCase):
-
-  def setUp(self):
-    self.log.info("Setting up the engine")
-
-    template_path = pjoin(
-        self.test_context.engine_directory, "recommendation-engine")
-    engine_json_path = pjoin(
-        self.test_context.data_directory, "quickstart_test/engine.json")
-
-    self.training_data_path = pjoin(
-        self.test_context.data_directory,
-        "quickstart_test/training_data.txt")
-
-    # downloading training data
-    srun('curl https://raw.githubusercontent.com/apache/spark/master/' \
-            'data/mllib/sample_movielens_data.txt --create-dirs -o {}'
-            .format(self.training_data_path))
-
-    app_context = AppContext(
-        name="MyRecommender",
-        template=template_path,
-        engine_json_path=engine_json_path)
-
-    self.app = AppEngine(self.test_context, app_context)
-
-  def runTest(self):
-    self.log.info("Adding a new application")
-    self.app.new()
-
-    event1 = {
-      "event" : "rate",
-      "entityType" : "user",
-      "entityId" : "u0",
-      "targetEntityType" : "item",
-      "targetEntityId" : "i0",
-      "properties" : {
-        "rating" : 5
-      },
-      "eventTime" : "2014-11-02T09:39:45.618-08:00" }
-
-    event2 = {
-      "event" : "buy",
-      "entityType" : "user",
-      "entityId" : "u1",
-      "targetEntityType" : "item",
-      "targetEntityId" : "i2",
-      "eventTime" : "2014-11-10T12:34:56.123-08:00" }
-
-    self.log.info("Sending two test events")
-    self.assertListEqual(
-        [201, 201],
-        [self.app.send_event(e).status_code for e in [event1, event2]])
-
-    self.log.info("Checking the number of events stored on the server")
-    r = self.app.get_events()
-    self.assertEquals(200, r.status_code)
-    stored_events = r.json()
-    self.assertEqual(2, len(stored_events))
-
-    self.log.info("Importing many events")
-    new_events = read_events(self.training_data_path)
-    for ev in new_events:
-      r = self.app.send_event(ev)
-      self.assertEqual(201, r.status_code)
-
-    self.log.info("Checking the number of events stored on the server after the update")
-    r = self.app.get_events(params={'limit': -1})
-    self.assertEquals(200, r.status_code)
-    stored_events = r.json()
-    self.assertEquals(len(new_events) + 2, len(stored_events))
-
-    self.log.info("Building an engine...")
-    self.app.build()
-    self.log.info("Training...")
-    self.app.train()
-    self.log.info("Deploying and waiting 15s for it to start...")
-    self.app.deploy(wait_time=15)
-
-    self.log.info("Sending a single query and checking results")
-    user_query = { "user": 1, "num": 4 }
-    r = self.app.query(user_query)
-    self.assertEqual(200, r.status_code)
-    result = r.json()
-    self.assertEqual(4, len(result['itemScores']))
-
-  def tearDown(self):
-    self.log.info("Stopping deployed engine")
-    self.app.stop()
-    self.log.info("Deleting all related data")
-    self.app.delete_data()
-    self.log.info("Removing an app")
-    self.app.delete()

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/tests.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/tests.py b/testing/pio_tests/tests.py
deleted file mode 100755
index 33d9940..0000000
--- a/testing/pio_tests/tests.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import os
-import sys
-import unittest
-import argparse
-import logging
-import time
-from xmlrunner import XMLTestRunner
-import pio_tests.globals as globals
-from utils import srun_bg
-from pio_tests.integration import TestContext
-from pio_tests.scenarios.quickstart_test import QuickStartTest
-from pio_tests.scenarios.basic_app_usecases import BasicAppUsecases
-from pio_tests.scenarios.eventserver_test import EventserverTest
-
-parser = argparse.ArgumentParser(description='Integration tests for PredictionIO')
-parser.add_argument('--eventserver-ip', default='0.0.0.0')
-parser.add_argument('--eventserver-port', type=int, default=7070)
-parser.add_argument('--no-shell-stdout', action='store_true',
-    help='Suppress STDOUT output from shell executed commands')
-parser.add_argument('--no-shell-stderr', action='store_true',
-    help='Suppress STDERR output from shell executed commands')
-parser.add_argument('--logging', action='store', choices=['INFO', 'DEBUG', 'NO_LOGGING'],
-    default='INFO', help='Choose the logging level')
-parser.add_argument('--tests', nargs='*', type=str,
-    default=None, help='Names of the tests to execute. By default all tests will be checked')
-
-TESTS_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
-ENGINE_DIRECTORY = os.path.join(TESTS_DIRECTORY, "engines")
-DATA_DIRECTORY = os.path.join(TESTS_DIRECTORY, "data")
-
-LOGGING_FORMAT = '[%(levelname)s] %(module)s %(asctime)-15s: %(message)s'
-logging.basicConfig(format=LOGGING_FORMAT)
-
-def get_tests(test_context):
-  # ========= ADD TESTS HERE!!! ================================
-  return {'QuickStart': QuickStartTest(test_context),
-          'BasicAppUsecases': BasicAppUsecases(test_context),
-          'EventserverTest': EventserverTest(test_context)}
-
-if __name__ == "__main__":
-  args = vars(parser.parse_args())
-
-  if args.get('no_shell_stdout'):
-    globals.SUPPRESS_STDOUT = True
-  if args.get('no_shell_stderr'):
-    globals.SUPPRESS_STDERR = True
-
-  # setting up logging
-  log_opt = args['logging']
-  logger = logging.getLogger(globals.LOGGER_NAME)
-  if log_opt == 'INFO':
-    logger.level = logging.INFO
-  elif log_opt == 'DEBUG':
-    logger.level = logging.DEBUG
-
-  test_context = TestContext(
-      ENGINE_DIRECTORY, DATA_DIRECTORY,
-      args['eventserver_ip'], int(args['eventserver_port']))
-
-  tests_dict = get_tests(test_context)
-  test_names = args['tests']
-  tests = []
-  if test_names is not None:
-    tests = [t for name, t in tests_dict.items() if name in test_names]
-  else:
-    tests = tests_dict.values()
-
-  # Actual tests execution
-  es_wait_time = 25
-  logger.info("Starting eventserver and waiting {}s for it to initialize".format(
-      es_wait_time))
-  event_server_process = srun_bg('pio eventserver --ip {} --port {}'
-      .format(test_context.es_ip, test_context.es_port))
-  time.sleep(es_wait_time)
-  result = XMLTestRunner(verbosity=2, output='test-reports').run(
-                unittest.TestSuite(tests))
-  event_server_process.kill()
-
-  if not result.wasSuccessful():
-    sys.exit(1)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/pio_tests/utils.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/utils.py b/testing/pio_tests/utils.py
deleted file mode 100644
index 629729e..0000000
--- a/testing/pio_tests/utils.py
+++ /dev/null
@@ -1,309 +0,0 @@
-import re
-import time
-import os
-import requests
-import json
-from shutil import copyfile
-from subprocess import run, Popen, check_output
-from os.path import join as pjoin
-import pio_tests.globals as globals
-
-def srun(command):
-  """ Runs a shell command given as a `str`
-  Raises: `subprocess.CalledProcessError` when exit code != 0
-  """
-  return run(command, shell=True, stdout=globals.std_out(),
-      stderr=globals.std_err(), check=True)
-
-def srun_out(command):
-  """ Runs a shell command given as a `str`
-  Returns: string with command's output
-  Raises: `subprocess.CalledProcessError` when exit code != 0
-  """
-  return check_output(command, shell=True, universal_newlines=True,
-      stderr=globals.std_err())
-
-def srun_bg(command):
-  """ Runs a shell command given as a `str` in the background
-  Returns: (obj: `subprocess.Popen`) for executed process
-  """
-  return Popen(command, shell=True, stdout=globals.std_out(),
-      stderr=globals.std_err())
-
-def repository_dirname(template):
-  """ Utility function getting repository name from the link
-  Example: for "https://github.com/user/SomeRepo" should return "SomeRepo"
-  """
-  return template.split('/')[-1]
-
-def obtain_template(engine_dir, template):
-  """Given a directory with engines and a template downloads an engine
-  if neccessary
-  Args:
-    engine_dir (str): directory where engines are stored
-    template (str): either the name of an engine from the engines directory
-        or a link to repository with the engine
-  Returns: str with the engine's path
-  """
-  if re.match('^https?:\/\/', template):
-    dest_dir = pjoin(engine_dir, repository_dirname(template))
-    if not os.path.exists(dest_dir):
-      srun('git clone --depth=1 {0} {1}'.format(template, dest_dir))
-    return dest_dir
-  else:
-    # check if exists
-    dest_dir = pjoin(engine_dir, template)
-    if not os.path.exists(dest_dir):
-      raise ValueError('Engine {0} does not exist in {1}'
-          .format(template, engine_dir))
-
-    return dest_dir
-
-def pio_app_list():
-  """Returns: a list of dicts for every application with the following keys:
-      `name`, `id`, `access_key`, `allowed_events`
-  """
-  output = srun_out('pio app list').rstrip()
-  return [ { 'name': line[2], 'id': int(line[4]),
-             'access_key': line[6], 'allowed_events': line[8] }
-          for line in [x.split() for x in output.split('\n')[1:-1]] ]
-
-def get_app_eventserver_url_json(test_context):
-  return 'http://{}:{}/events.json'.format(
-      test_context.es_ip, test_context.es_port)
-
-def get_engine_url_json(engine_ip, engine_port):
-  return 'http://{}:{}/queries.json'.format(
-      engine_ip, engine_port)
-
-def send_event(event, test_context, access_key, channel=None):
-  """ Sends an event to the eventserver
-  Args:
-    event: json-like dictionary describing an event
-    test_context (obj: `TestContext`):
-    access_key: application's access key
-    channel (str): custom channel for storing event
-  Returns: `requests.Response`
-  """
-  url = get_app_eventserver_url_json(test_context)
-  params = { 'accessKey': access_key }
-  if channel: params['channel'] = channel
-  return requests.post(
-      url,
-      params=params,
-      json=event)
-
-def send_events_batch(events, test_context, access_key, channel=None):
-  """ Send events in batch via REST to the eventserver
-  Args:
-    events: a list of json-like dictionaries for events
-    test_context (obj: `TestContext`):
-    access_key: application's access key
-    channel (str): custom channel for storing event
-  Returns: `requests.Response`
-  Requires: Events length must not exceed length of 50
-    http://docs.prediction.io/datacollection/eventmodel/#3.-batch-events-to-the-eventserver
-  """
-  url = 'http://{}:{}/batch/events.json'.format(
-      test_context.es_ip, test_context.es_port)
-  params = { 'accessKey': access_key }
-  if channel: params['channel'] = channel
-  return requests.post(
-      url,
-      params=params,
-      json=events)
-
-
-def import_events_batch(events, test_context, appid, channel=None):
-  """ Imports events in batch from file with `pio import`
-  Args:
-    events: a list of json-like dictionaries for events
-    test_context (obj: `TestContext`)
-    appid (int): application's id
-    channel (str): custom channel for storing event
-  """
-  # Writing events list to temporary file.
-  # `pio import` requires each line of input file to be a JSON string
-  # representing an event. Empty lines are not allowed.
-  contents = ''
-  for ev in events:
-      contents += '{}\n'.format(json.dumps(ev))
-  contents.rstrip('\n')
-
-  file_path = pjoin(test_context.data_directory, 'events.json.tmp')
-  try:
-      with open(file_path, 'w') as f:
-          f.write(contents)
-      srun('pio import --appid {} --input {} {}'.format(
-          appid,
-          file_path,
-          '--channel {}'.format(channel) if channel else ''))
-  finally:
-      os.remove(file_path)
-
-def get_events(test_context, access_key, params={}):
-  """ Gets events for some application
-  Args:
-    test_context (obj: `TestContext`)
-    access_key (str):
-    params (dict): special parameters for eventserver's GET, e.g:
-        'limit', 'reversed', 'event'. See the docs
-  Returns: `requests.Response`
-  """
-  url = get_app_eventserver_url_json(test_context)
-  return requests.get(url, params=dict({'accessKey': access_key}, **params))
-
-def query_engine(data, engine_ip='localhost', engine_port=8000):
-  """ Send a query to deployed engine
-  Args:
-    data (dict): json-like dictionary being an input to an engine
-    access_key (str):
-    engine_ip (str): ip of deployed engine
-    engine_port (int): port of deployed engine
-  Returns: `requests.Response`
-  """
-  url = get_engine_url_json(engine_ip, engine_port)
-  return requests.post(url, json=data)
-
-class AppEngine:
-  """ This is a utility class simplifying all app related interactions.
-  Basically it is just a wrapper on other utility functions and shell
-  scripts.
-  """
-
-  def __init__(self, test_context, app_context, already_created=False):
-    """ Args:
-        test_context (obj: `TestContext`)
-        app_context (obj: `AppContext`)
-        already_created (bool): True if the given app has been already added
-    """
-    self.test_context = test_context
-    self.app_context = app_context
-    self.engine_path = obtain_template(
-        self.test_context.engine_directory, app_context.template)
-    self.deployed_process = None
-    if already_created:
-      self.__init_info()
-    else:
-      self.id = None
-      self.access_key = None
-      self.description = None
-
-    if self.app_context.engine_json_path:
-      self.__copy_engine_json()
-
-  def __copy_engine_json(self):
-    to_path = pjoin(self.engine_path, 'engine.json')
-    copyfile(self.app_context.engine_json_path, to_path)
-
-  def __init_info(self):
-    info = self.show()
-    self.id = info['id']
-    self.access_key = info['access_key']
-    self.description = info['description']
-
-  def new(self, id=None, description=None, access_key=None):
-    """ Creates a new application with given parameters """
-    srun('pio app new {} {} {} {}'.format(
-        '--id {}'.format(id) if id else '',
-        '--description \"{}\"'.format(description) if description else '',
-        '--access-key {}'.format(access_key) if access_key else '',
-        self.app_context.name))
-
-    self.__init_info()
-
-
-  def show(self):
-    """ Returns: application info in dictionary with the keys:
-         `name`: str, `id`: int, `description`: str,
-         `access_key`: str, `allowed_events`: str
-    """
-    output = srun_out('pio app show {}'.format(self.app_context.name)).rstrip()
-    lines = [x.split() for x in output.split('\n')]
-    return { 'name': lines[0][3],
-             'id': int(lines[1][4]),
-             'description': lines[2][3] if len(lines[2]) >= 4 else '',
-             'access_key': lines[3][4],
-             'allowed_events': lines[3][5] }
-
-
-  # deletes this app from pio
-  def delete(self):
-    srun('pio app delete {0} --force'.format(self.app_context.name))
-
-  def build(self, sbt_extra=None, clean=False, no_asm=True):
-    srun('cd {0}; pio build {1} {2} {3}'.format(
-        self.engine_path,
-        '--sbt-extra {}'.format(sbt_extra) if sbt_extra else '',
-        '--clean' if clean else '',
-        '--no-asm' if no_asm else ''))
-
-  def train(self, batch=None, skip_sanity_check=False, stop_after_read=False,
-          stop_after_prepare=False, engine_factory=None,
-          engine_params_key=None, scratch_uri=None):
-
-    srun('cd {}; pio train {} {} {} {} {} {} {}'.format(
-        self.engine_path,
-        '--batch {}'.format(batch) if batch else '',
-        '--skip-sanity-check' if skip_sanity_check else '',
-        '--stop-after-read' if stop_after_read else '',
-        '--stop-after-prepare' if stop_after_prepare else '',
-        '--engine_factory {}'.format(engine_factory) if engine_factory else '',
-        '--engine-params-key {}'.format(engine_params_key) if engine_params_key else '',
-        '--scratch-uri {}'.format(scratch_uri) if scratch_uri else ''))
-
-  def deploy(self, wait_time=0, ip=None, port=None, engine_instance_id=None,
-          feedback=False, accesskey=None, event_server_ip=None, event_server_port=None,
-          batch=None, scratch_uri=None):
-
-    command = 'cd {}; pio deploy {} {} {} {} {} {} {} {} {}'.format(
-            self.engine_path,
-            '--ip {}'.format(ip) if ip else '',
-            '--port {}'.format(port) if port else '',
-            '--engine-instance-id {}'.format(engine_instance_id) if engine_instance_id else '',
-            '--feedback' if feedback else '',
-            '--accesskey {}'.format(accesskey) if accesskey else '',
-            '--event-server-ip {}'.format(event_server_ip) if event_server_ip else '',
-            '--event-server-port {}'.format(event_server_port) if event_server_port else '',
-            '--batch {}'.format(bach) if batch else '',
-            '--scratch-uri {}'.format(scratch_uri) if scratch_uri else '')
-
-    self.deployed_process = srun_bg(command)
-    time.sleep(wait_time)
-    if self.deployed_process.poll() is not None:
-      raise Exception('Application engine terminated')
-    self.ip = ip if ip else 'localhost'
-    self.port = port if port else 8000
-
-  def stop(self):
-    """ Kills deployed engine """
-    if self.deployed_process:
-      self.deployed_process.kill()
-
-  def new_channel(self, channel):
-    srun('pio app channel-new {0}'.format(channel))
-
-  def delete_channel(self, channel):
-    srun('pio app channel-delete {0} --force'.format(channel))
-
-  def send_event(self, event):
-    return send_event(event, self.test_context, self.access_key)
-
-  def send_events_batch(self, events):
-    return send_events_batch(events, self.test_context, self.access_key)
-
-  def import_events_batch(self, events):
-    return import_events_batch(events, self.test_context, self.id)
-
-  def get_events(self, params={}):
-    return get_events(self.test_context, self.access_key, params)
-
-  def delete_data(self, delete_all=True, channel=None):
-    srun('pio app data-delete {0} {1} {2} --force'
-        .format(
-            self.app_context.name,
-            '--all' if delete_all else '',
-            '--channel ' + channel if channel is not None else ''))
-
-  def query(self, data):
-    return query_engine(data, self.ip, self.port)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/run_docker.sh
----------------------------------------------------------------------
diff --git a/testing/run_docker.sh b/testing/run_docker.sh
deleted file mode 100755
index d5925ef..0000000
--- a/testing/run_docker.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash -
-
-USAGE=$"Usage: run_docer <meta> <event> <model> <pio> <command>
-  Where:
-    meta         = [PGSQL,ELASTICSEARCH]
-    event        = [PGSQL,HBASE]
-    model        = [PGSQL,LOCALFS,HDFS]
-    pio          = path to PredictionIO directory
-    command      = command to run in the container"
-
-if ! [[ "$1" =~ ^(PGSQL|ELASTICSEARCH)$ ]]; then
-  echo "$USAGE"
-  exit 1
-fi
-
-if ! [[ "$2" =~ ^(PGSQL|HBASE)$ ]]; then
-  echo "$USAGE"
-  exit 1
-fi
-
-if ! [[ "$3" =~ ^(PGSQL|LOCALFS|HDFS)$ ]]; then
-  echo "$USAGE"
-  exit 1
-fi
-
-if [ ! -d "$4" ]; then
-  echo "Directory $4 does not exist"
-  echo "$USAGE"
-  exit 1
-fi
-
-docker run -it -h localhost \
-  -v $4:/pio_host \
-  -v ~/.ivy2:/root/.ivy2 \
-  -e PIO_STORAGE_REPOSITORIES_METADATA_SOURCE=$1 \
-  -e PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE=$2 \
-  -e PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE=$3 \
-  -p 8000:8000 -p 7070:7070 -p 8080:8080 -p 8081:8081 -p 4040:4040 \
-  -p 60000:60000 -p 60010:60010 -p 60020:60020 -p 60030:60030 ziemin/pio-testing $5

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/testing/script.travis.sh
----------------------------------------------------------------------
diff --git a/testing/script.travis.sh b/testing/script.travis.sh
deleted file mode 100755
index 2361f61..0000000
--- a/testing/script.travis.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash -
-
-set -e
-
-if [[ $BUILD_TYPE == Unit ]]; then
-  # Prepare pio environment variables
-  set -a
-  source conf/pio-env.sh.travis
-  set +a
-
-  # Run stylecheck
-  sbt scalastyle
-  # Run all unit tests
-  sbt test
-
-else
-  REPO=`pwd`
-
-  ./testing/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \
-    $REPO 'python3 /tests/pio_tests/tests.py'
-fi

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/Dockerfile
----------------------------------------------------------------------
diff --git a/tests/Dockerfile b/tests/Dockerfile
new file mode 100644
index 0000000..fc3d89d
--- /dev/null
+++ b/tests/Dockerfile
@@ -0,0 +1,89 @@
+FROM ubuntu
+
+ENV SPARK_VERSION 1.4.0
+ENV ELASTICSEARCH_VERSION 1.4.4
+ENV HBASE_VERSION 1.0.0
+
+RUN echo "== Updating system =="
+RUN apt-get update -y
+RUN echo "== Downloading packages =="
+RUN apt-get install -y \
+    wget curl \
+    python-pip \
+    python3-pip \
+    postgresql postgresql-contrib \
+    openjdk-8-jdk \
+    openssh-client openssh-server
+
+RUN pip install predictionio
+RUN pip3 install --upgrade pip
+RUN pip3 install xmlrunner
+RUN pip3 install --upgrade requests
+RUN pip3 install --upgrade urllib3
+
+ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64/jre
+
+RUN echo "== Installing Spark =="
+RUN mkdir vendors
+RUN wget http://d3kbcqa49mib13.cloudfront.net/spark-${SPARK_VERSION}-bin-hadoop2.6.tgz
+RUN tar zxvfC spark-${SPARK_VERSION}-bin-hadoop2.6.tgz /vendors
+RUN rm spark-${SPARK_VERSION}-bin-hadoop2.6.tgz
+ENV SPARK_HOME /vendors/spark-${SPARK_VERSION}-bin-hadoop2.6
+
+RUN echo "== Installing Elasticsearch =="
+RUN wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
+RUN tar zxvfC elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz /vendors
+RUN rm elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz
+ENV ELASTICSEARCH_HOME /vendors/elasticsearch-${ELASTICSEARCH_VERSION}
+
+RUN echo "== Installing HBase =="
+RUN wget http://archive.apache.org/dist/hbase/hbase-${HBASE_VERSION}/hbase-${HBASE_VERSION}-bin.tar.gz
+RUN tar zxvfC hbase-${HBASE_VERSION}-bin.tar.gz /vendors
+RUN rm hbase-${HBASE_VERSION}-bin.tar.gz
+ENV HBASE_HOME /vendors/hbase-${HBASE_VERSION}
+
+RUN echo "== Downloading database drivers =="
+RUN mkdir drivers
+RUN wget https://jdbc.postgresql.org/download/postgresql-9.4-1204.jdbc41.jar -P /drivers
+
+RUN mkdir PredictionIO
+ENV PIO_HOME /PredictionIO
+ENV PATH ${PIO_HOME}/bin/:${PATH}
+ENV HOST_PIO_HOME /pio_host
+
+RUN echo "== Setting configs =="
+COPY docker-files/init.sh init.sh
+COPY docker-files/env-conf/spark-env.sh ${SPARK_HOME}/conf/spark-env.sh
+COPY docker-files/env-conf/hbase-site.xml ${HBASE_HOME}/conf/hbase-site.xml
+COPY docker-files/env-conf/pio-env.sh /pio-env.sh
+
+# Default repositories setup
+ENV PIO_STORAGE_REPOSITORIES_METADATA_SOURCE PGSQL
+ENV PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE PGSQL
+ENV PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE PGSQL
+
+# JVM settings
+ENV JVM_OPTS '-Dfile.encoding=UTF8 -Xms2048M -Xmx2048M -Xss8M -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256M'
+
+# Expose relevant ports
+# pio engine
+EXPOSE 8000
+# eventserver
+EXPOSE 7070
+# spark master UI
+EXPOSE 8080
+# spark worker UI
+EXPOSE 8081
+# spark context UI
+EXPOSE 4040
+# HMaster
+EXPOSE 60000
+# HMaster Info Web UI
+EXPOSE 60010
+# Region Server
+EXPOSE 60020
+# Region Server Http
+EXPOSE 60030
+
+ENTRYPOINT ["/init.sh"]
+CMD 'bash'

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/README.md
----------------------------------------------------------------------
diff --git a/tests/README.md b/tests/README.md
new file mode 100644
index 0000000..a9e5dea
--- /dev/null
+++ b/tests/README.md
@@ -0,0 +1,31 @@
+# Testing PredictionIO
+
+The intention of this subdirectory is to amass different types of tests other than unit-tests, and also to make developers' lives easier by giving them the means to check the application deterministically for different configurations.
+Moreover, it provides testing scenarios for **TravisCI** to be run on pull requests and commits.
+
+
+## Integration Tests
+These tests are mostly user-functionality tests. They check logic and reliability of the system.
+In order to get familiar with their structure, please see [README](pio_tests/README.md).
+
+## Docker image
+After introducing some changes, a developer would like to try them against different configurations, namely to see if everything works as expected e.g. when you change the data repository for the events or meta-data.
+A good way to do that is to use the docker image with installed and running dependencies.
+
+To download the image run:
+```
+$ docker pull ziemin/pio-testing
+```
+
+The most convenient way to make use of it is to execute ***run_docker.sh*** script passing it the configuration, the path to PredictionIO's repository with archived snapshot and the command to run. When no command is provided it opens a bash shell inside the docker image. Example of usage:
+```sh
+$ ./run_docker.sh ELASTICSEARCH HBASE LOCALFS \ 
+    ~/projects/incubator-predictionio "echo 'All tests passed...'"
+```
+
+Directory structure inside the image:
+* ***/PredictionIO*** - extracted snapshot
+* ***/pio_host*** - mounted path to repository
+* ***/tests/pio_tests*** - copy of integration tests
+* ***/vendors*** - directory with installed services
+* ***/drivers*** - jars with database drivers

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/after_script.travis.sh
----------------------------------------------------------------------
diff --git a/tests/after_script.travis.sh b/tests/after_script.travis.sh
new file mode 100755
index 0000000..fdc635f
--- /dev/null
+++ b/tests/after_script.travis.sh
@@ -0,0 +1,7 @@
+#!/bin/bash -
+
+set -e
+
+if [[ $BUILD_TYPE == Unit ]]; then
+  ./bin/travis/pio-stop-travis
+fi

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/before_script.travis.sh
----------------------------------------------------------------------
diff --git a/tests/before_script.travis.sh b/tests/before_script.travis.sh
new file mode 100755
index 0000000..d7f9cef
--- /dev/null
+++ b/tests/before_script.travis.sh
@@ -0,0 +1,25 @@
+#!/bin/bash -
+
+set -e
+
+if [[ $BUILD_TYPE == Unit ]]; then
+
+  # Download spark, hbase
+  mkdir vendors
+  wget http://d3kbcqa49mib13.cloudfront.net/spark-1.3.0-bin-hadoop2.4.tgz
+  tar zxfC spark-1.3.0-bin-hadoop2.4.tgz vendors
+  wget http://archive.apache.org/dist/hbase/hbase-1.0.0/hbase-1.0.0-bin.tar.gz
+  tar zxfC hbase-1.0.0-bin.tar.gz vendors
+
+  # Prepare pio environment variables
+  set -a
+  source conf/pio-env.sh.travis
+  set +a
+
+  # Create postgres database for PredictionIO
+  psql -c 'create database predictionio;' -U postgres
+  ./bin/travis/pio-start-travis
+
+else # Integration Tests
+  ./make-distribution.sh
+fi

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/docker-files/env-conf/hbase-site.xml
----------------------------------------------------------------------
diff --git a/tests/docker-files/env-conf/hbase-site.xml b/tests/docker-files/env-conf/hbase-site.xml
new file mode 100644
index 0000000..af3ab4f
--- /dev/null
+++ b/tests/docker-files/env-conf/hbase-site.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+  <property>
+    <name>hbase.rootdir</name>
+    <value>file:///hbase-files/data</value>
+  </property>
+  <property>
+    <name>hbase.zookeeper.property.dataDir</name>
+    <value>/hbase-files/zookeeper</value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/docker-files/env-conf/pio-env.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/env-conf/pio-env.sh b/tests/docker-files/env-conf/pio-env.sh
new file mode 100644
index 0000000..8391e97
--- /dev/null
+++ b/tests/docker-files/env-conf/pio-env.sh
@@ -0,0 +1,87 @@
+#!/usr/bin/env bash
+
+# Copy this file as pio-env.sh and edit it for your site's configuration.
+
+# PredictionIO Main Configuration
+#
+# This section controls core behavior of PredictionIO. It is very likely that
+# you need to change these to fit your site.
+
+# SPARK_HOME: Apache Spark is a hard dependency and must be configured.
+SPARK_HOME=$SPARK_HOME
+
+POSTGRES_JDBC_DRIVER=/drivers/postgresql-9.4-1204.jdbc41.jar
+MYSQL_JDBC_DRIVER=
+
+# ES_CONF_DIR: You must configure this if you have advanced configuration for
+#              your Elasticsearch setup.
+# ES_CONF_DIR=/opt/elasticsearch
+
+# HADOOP_CONF_DIR: You must configure this if you intend to run PredictionIO
+#                  with Hadoop 2.
+# HADOOP_CONF_DIR=/opt/hadoop
+
+# HBASE_CONF_DIR: You must configure this if you intend to run PredictionIO
+#                 with HBase on a remote cluster.
+HBASE_CONF_DIR=$HBASE_HOME/conf
+
+# Filesystem paths where PredictionIO uses as block storage.
+PIO_FS_BASEDIR=$HOME/.pio_store
+PIO_FS_ENGINESDIR=$PIO_FS_BASEDIR/engines
+PIO_FS_TMPDIR=$PIO_FS_BASEDIR/tmp
+
+# PredictionIO Storage Configuration
+#
+# This section controls programs that make use of PredictionIO's built-in
+# storage facilities. Default values are shown below.
+#
+# For more information on storage configuration please refer to
+# https://docs.prediction.io/system/anotherdatastore/
+
+# Storage Repositories
+
+# Default is to use PostgreSQL
+PIO_STORAGE_REPOSITORIES_METADATA_NAME=pio_meta
+PIO_STORAGE_REPOSITORIES_METADATA_SOURCE=$PIO_STORAGE_REPOSITORIES_METADATA_SOURCE
+
+PIO_STORAGE_REPOSITORIES_EVENTDATA_NAME=pio_event
+PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE=$PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE
+
+PIO_STORAGE_REPOSITORIES_MODELDATA_NAME=pio_model
+PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE=$PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE
+
+# Storage Data Sources
+
+# PostgreSQL Default Settings
+# Please change "pio" to your database name in PIO_STORAGE_SOURCES_PGSQL_URL
+# Please change PIO_STORAGE_SOURCES_PGSQL_USERNAME and
+# PIO_STORAGE_SOURCES_PGSQL_PASSWORD accordingly
+PIO_STORAGE_SOURCES_PGSQL_TYPE=jdbc
+PIO_STORAGE_SOURCES_PGSQL_URL=jdbc:postgresql://localhost/pio
+PIO_STORAGE_SOURCES_PGSQL_USERNAME=pio
+PIO_STORAGE_SOURCES_PGSQL_PASSWORD=pio
+
+# MySQL Example
+# PIO_STORAGE_SOURCES_MYSQL_TYPE=jdbc
+# PIO_STORAGE_SOURCES_MYSQL_URL=jdbc:mysql://localhost/pio
+# PIO_STORAGE_SOURCES_MYSQL_USERNAME=pio
+# PIO_STORAGE_SOURCES_MYSQL_PASSWORD=pio
+
+# Elasticsearch Example
+PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch
+#PIO_STORAGE_SOURCES_ELASTICSEARCH_CLUSTERNAME=pio
+PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=localhost
+PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300
+PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$ELASTICSEARCH_HOME
+
+# Local File System Example
+PIO_STORAGE_SOURCES_LOCALFS_TYPE=localfs
+PIO_STORAGE_SOURCES_LOCALFS_PATH=$PIO_FS_BASEDIR/local_models
+
+# HBase Example
+PIO_STORAGE_SOURCES_HBASE_TYPE=hbase
+PIO_STORAGE_SOURCES_HBASE_HOME=$HBASE_HOME
+
+# HDFS config
+PIO_STORAGE_SOURCES_HDFS_TYPE=hdfs
+PIO_STORAGE_SOURCES_HDFS_PATH=/hdfs_models

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/docker-files/env-conf/spark-defaults.conf
----------------------------------------------------------------------
diff --git a/tests/docker-files/env-conf/spark-defaults.conf b/tests/docker-files/env-conf/spark-defaults.conf
new file mode 100644
index 0000000..fcb1b15
--- /dev/null
+++ b/tests/docker-files/env-conf/spark-defaults.conf
@@ -0,0 +1,13 @@
+# Default system properties included when running spark-submit.
+# This is useful for setting default environmental settings.
+
+# Example:
+# spark.master                     spark://master:7077
+# spark.eventLog.enabled           true
+# spark.eventLog.dir               hdfs://namenode:8021/directory
+# spark.serializer                 org.apache.spark.serializer.KryoSerializer
+spark.driver.memory              10g
+spark.executor.memory            10g
+spark.driver.cores               4
+spark.ui.port                    4040
+# spark.executor.extraJavaOptions  -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/docker-files/env-conf/spark-env.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/env-conf/spark-env.sh b/tests/docker-files/env-conf/spark-env.sh
new file mode 100755
index 0000000..22e7a9c
--- /dev/null
+++ b/tests/docker-files/env-conf/spark-env.sh
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+
+# Options read when launching programs locally with
+# ./bin/run-example or ./bin/spark-submit
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
+# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
+# - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
+# - SPARK_CLASSPATH, default classpath entries to append
+
+# Options read by executors and drivers running inside the cluster
+# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
+# - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
+# - SPARK_CLASSPATH, default classpath entries to append
+# - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
+# - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos
+
+# Options read in YARN client mode
+# - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
+# - SPARK_EXECUTOR_INSTANCES, Number of workers to start (Default: 2)
+# - SPARK_EXECUTOR_CORES, Number of cores for the workers (Default: 1).
+# - SPARK_EXECUTOR_MEMORY, Memory per Worker (e.g. 1000M, 2G) (Default: 1G)
+# - SPARK_DRIVER_MEMORY, Memory for Master (e.g. 1000M, 2G) (Default: 1G)
+# - SPARK_YARN_APP_NAME, The name of your application (Default: Spark)
+# - SPARK_YARN_QUEUE, The hadoop queue to use for allocation requests (Default: ‘default’)
+# - SPARK_YARN_DIST_FILES, Comma separated list of files to be distributed with the job.
+# - SPARK_YARN_DIST_ARCHIVES, Comma separated list of archives to be distributed with the job.
+
+# Options for the daemons used in the standalone deploy mode
+# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
+# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master
+# - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y")
+# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
+# - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g)
+# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker
+# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
+# - SPARK_WORKER_DIR, to set the working directory of worker processes
+# - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y")
+# - SPARK_DAEMON_MEMORY, to allocate to the master, worker and history server themselves (default: 1g).
+# - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y")
+# - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y")
+# - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y")
+# - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers
+
+# Generic options for the daemons used in the standalone deploy mode
+# - SPARK_CONF_DIR      Alternate conf dir. (Default: ${SPARK_HOME}/conf)
+# - SPARK_LOG_DIR       Where log files are stored.  (Default: ${SPARK_HOME}/logs)
+# - SPARK_PID_DIR       Where the pid file is stored. (Default: /tmp)
+# - SPARK_IDENT_STRING  A string representing this instance of spark. (Default: $USER)
+# - SPARK_NICENESS      The scheduling priority for daemons. (Default: 0)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/docker-files/init.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/init.sh b/tests/docker-files/init.sh
new file mode 100755
index 0000000..de50f3f
--- /dev/null
+++ b/tests/docker-files/init.sh
@@ -0,0 +1,40 @@
+#!/bin/bash -
+
+set -e
+
+echo '== Setting up Postgres... =='
+service postgresql start
+runuser postgres -c 'createuser -s root'
+createdb root
+
+psql -c "create user pio with password 'pio'" && createdb pio
+
+echo '== Starting SSH... =='
+service ssh start
+ssh-keygen -b 2048 -t rsa -q -f /root/.ssh/id_rsa -N ""
+cat /root/.ssh/id_rsa.pub >> /root/.ssh/authorized_keys
+
+echo '== Starting HBase... =='
+$HBASE_HOME/bin/start-hbase.sh
+
+echo '== Starting standalone Spark cluster... =='
+$SPARK_HOME/sbin/start-all.sh
+
+echo '== Starting Elasticsearch... =='
+$ELASTICSEARCH_HOME/bin/elasticsearch -d -p $PIO_HOME/es.pid
+
+echo '== Copying distribution to PIO_HOME... =='
+DISTRIBUTION_TAR=`find /pio_host -maxdepth 1 -name PredictionIO*SNAPSHOT.tar.gz | head -1`
+tar zxvfC $DISTRIBUTION_TAR /
+DIR_NAME=/`basename $DISTRIBUTION_TAR`
+DIR_NAME=${DIR_NAME%.tar.gz}
+mv $DIR_NAME/* $PIO_HOME/
+mv /pio-env.sh $PIO_HOME/conf/pio-env.sh
+
+echo '== Copying tests to a separate directory =='
+mkdir /tests
+cp -r /pio_host/tests/pio_tests /tests/pio_tests
+export PYTHONPATH=/tests:$PYTHONPATH
+
+# after initialization run given command
+eval $@

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/README.md
----------------------------------------------------------------------
diff --git a/tests/pio_tests/README.md b/tests/pio_tests/README.md
new file mode 100644
index 0000000..c885ab5
--- /dev/null
+++ b/tests/pio_tests/README.md
@@ -0,0 +1,43 @@
+# PredictionIO - Integration Tests
+
+This python module introduces a basic framework for adding integration tests to
+PredictionIO. It is nothing more than a collection of utility functions mostly being wrappers
+over shell executed commands.
+
+### Prerequisites
+In order to execute tests, besides a configured **PredictionIO** environment one
+has to download the following python-3 packages:
+* requests
+* unittest
+* xmlrunner
+
+### Execution
+*tests.py* - the executable script. Launches eventserver to be available for the tests.
+You can pass it arguments to:
+* suppress the output of executed shell commands within the tests
+* enable logging
+* specify which tests should be executed (by names)
+
+For more information run:
+```shell
+python3 tests.py -h
+```
+
+As soon as the tests are finished, an XML file with JUnit-like test reports 
+is created in the directory of execution.
+
+### Adding new tests
+Every test should be an instance of **BaseTestCase** defined in **pio_tests.integration**.  
+Upon creation, a **pio_tests.integration.TestContext**  object is provided to it with description of:
+* ip address and a port of running eventserver
+* directories containing stored engines and data for specific tests
+
+Every test should be registered in the appropriate place in *tests.py* file, whereas
+its definition should reside in **pio_tests.scenarios** module. If the test requires some additional files
+during the execution, you should put them under *data* directory mentioned above.
+
+The best way to test different application engines is to make use of **pio_tests.utility.AppEngine**.
+Apart from containing utility functions, it downloads engine templates if necessary.
+
+To see an example of implemented test check **pio_tests.scenarios.quickstart_test**, which is
+a repetition of the QuickStart tutorial from the doc site.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/__init__.py
----------------------------------------------------------------------
diff --git a/tests/pio_tests/__init__.py b/tests/pio_tests/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/data/eventserver_test/partially_malformed_events.json
----------------------------------------------------------------------
diff --git a/tests/pio_tests/data/eventserver_test/partially_malformed_events.json b/tests/pio_tests/data/eventserver_test/partially_malformed_events.json
new file mode 100644
index 0000000..f95bae4
--- /dev/null
+++ b/tests/pio_tests/data/eventserver_test/partially_malformed_events.json
@@ -0,0 +1,10 @@
+[
+  { 
+    "event" : "test",
+    "entityType" : "test",
+    "entityId" : "t2"
+  },
+  {
+    "event" : "malformed-event" 
+  }
+]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/data/eventserver_test/rate_events_25.json
----------------------------------------------------------------------
diff --git a/tests/pio_tests/data/eventserver_test/rate_events_25.json b/tests/pio_tests/data/eventserver_test/rate_events_25.json
new file mode 100644
index 0000000..3b97285
--- /dev/null
+++ b/tests/pio_tests/data/eventserver_test/rate_events_25.json
@@ -0,0 +1,278 @@
+[
+  {
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "1",
+    "targetEntityType" : "item",
+    "targetEntityId" : "1",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "1",
+    "targetEntityType" : "item",
+    "targetEntityId" : "2",
+    "properties" : {
+      "rating" : 3
+    },
+    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "1",
+    "targetEntityType" : "item",
+    "targetEntityId" : "3",
+    "properties" : {
+      "rating" : 1
+    },
+    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "1",
+    "targetEntityType" : "item",
+    "targetEntityId" : "4",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "1",
+    "targetEntityType" : "item",
+    "targetEntityId" : "5",
+    "properties" : {
+      "rating" : 3
+    },
+    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "2",
+    "targetEntityType" : "item",
+    "targetEntityId" : "1",
+    "properties" : {
+      "rating" : 1
+    },
+    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "2",
+    "targetEntityType" : "item",
+    "targetEntityId" : "2",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "2",
+    "targetEntityType" : "item",
+    "targetEntityId" : "3",
+    "properties" : {
+      "rating" : 3
+    },
+    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "2",
+    "targetEntityType" : "item",
+    "targetEntityId" : "4",
+    "properties" : {
+      "rating" : 3
+    },
+    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "2",
+    "targetEntityType" : "item",
+    "targetEntityId" : "5",
+    "properties" : {
+      "rating" : 4
+    },
+    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
+  },
+  {
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "3",
+    "targetEntityType" : "item",
+    "targetEntityId" : "1",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "3",
+    "targetEntityType" : "item",
+    "targetEntityId" : "2",
+    "properties" : {
+      "rating" : 2
+    },
+    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "3",
+    "targetEntityType" : "item",
+    "targetEntityId" : "3",
+    "properties" : {
+      "rating" : 1
+    },
+    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "3",
+    "targetEntityType" : "item",
+    "targetEntityId" : "4",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "3",
+    "targetEntityType" : "item",
+    "targetEntityId" : "5",
+    "properties" : {
+      "rating" : 3
+    },
+    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "4",
+    "targetEntityType" : "item",
+    "targetEntityId" : "1",
+    "properties" : {
+      "rating" : 3
+    },
+    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "4",
+    "targetEntityType" : "item",
+    "targetEntityId" : "2",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "4",
+    "targetEntityType" : "item",
+    "targetEntityId" : "3",
+    "properties" : {
+      "rating" : 4
+    },
+    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "4",
+    "targetEntityType" : "item",
+    "targetEntityId" : "4",
+    "properties" : {
+      "rating" : 2
+    },
+    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "4",
+    "targetEntityType" : "item",
+    "targetEntityId" : "5",
+    "properties" : {
+      "rating" : 4
+    },
+    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "5",
+    "targetEntityType" : "item",
+    "targetEntityId" : "1",
+    "properties" : {
+      "rating" : 2
+    },
+    "eventTime" : "2014-11-01T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "5",
+    "targetEntityType" : "item",
+    "targetEntityId" : "2",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-02T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "5",
+    "targetEntityType" : "item",
+    "targetEntityId" : "3",
+    "properties" : {
+      "rating" : 3
+    },
+    "eventTime" : "2014-11-03T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "5",
+    "targetEntityType" : "item",
+    "targetEntityId" : "4",
+    "properties" : {
+      "rating" : 5
+    },
+    "eventTime" : "2014-11-04T09:39:45.618-08:00" 
+  },
+  { 
+    "event" : "rate",
+    "entityType" : "user",
+    "entityId" : "5",
+    "targetEntityType" : "item",
+    "targetEntityId" : "5",
+    "properties" : {
+      "rating" : 4
+    },
+    "eventTime" : "2014-11-05T09:39:45.618-08:00" 
+  }
+]
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/data/eventserver_test/signup_events_51.json
----------------------------------------------------------------------
diff --git a/tests/pio_tests/data/eventserver_test/signup_events_51.json b/tests/pio_tests/data/eventserver_test/signup_events_51.json
new file mode 100644
index 0000000..d8c31bd
--- /dev/null
+++ b/tests/pio_tests/data/eventserver_test/signup_events_51.json
@@ -0,0 +1,257 @@
+[
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "1"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "2"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "3"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "4"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "5"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "6"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "7"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "8"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "9"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "10"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "11"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "12"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "13"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "14"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "15"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "16"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "17"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "18"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "19"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "20"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "21"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "22"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "23"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "24"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "25"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "26"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "27"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "28"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "29"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "30"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "31"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "32"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "33"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "34"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "35"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "36"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "37"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "38"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "39"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "40"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "41"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "42"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "43"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "44"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "45"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "46"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "47"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "48"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "49"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "50"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "51"
+  }
+]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/data/quickstart_test/engine.json
----------------------------------------------------------------------
diff --git a/tests/pio_tests/data/quickstart_test/engine.json b/tests/pio_tests/data/quickstart_test/engine.json
new file mode 100644
index 0000000..c7b6b4b
--- /dev/null
+++ b/tests/pio_tests/data/quickstart_test/engine.json
@@ -0,0 +1,21 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.template.recommendation.RecommendationEngine",
+  "datasource": {
+    "params" : {
+      "appName": "MyRecommender"
+    }
+  },
+  "algorithms": [
+    {
+      "name": "als",
+      "params": {
+        "rank": 10,
+        "numIterations": 10,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/README.md
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/README.md b/tests/pio_tests/engines/recommendation-engine/README.md
new file mode 100644
index 0000000..6566db4
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/README.md
@@ -0,0 +1,42 @@
+# Recommendation Template
+
+## Documentation
+
+Please refer to http://docs.prediction.io/templates/recommendation/quickstart/
+
+## Versions
+
+### develop
+
+### v0.3.2
+
+- Fix incorrect top items in batchPredict() (issue #5)
+
+### v0.3.1
+
+- Add Evaluation module and modify DataSource for it
+
+### v0.3.0
+
+- update for PredictionIO 0.9.2, including:
+
+  - use new PEventStore API
+  - use appName in DataSource parameter
+
+### v0.2.0
+
+- update build.sbt and template.json for PredictionIO 0.9.2
+
+### v0.1.2
+
+- update for PredictionIO 0.9.0
+
+### v0.1.1
+
+- Persist RDD to memory (.cache()) in DataSource for better performance and quick fix for new user/item ID BiMap error issue.
+
+### v0.1.0
+
+- initial version
+- known issue:
+  * If importing new events of new users/items during training, the new user/item id can't be found in the BiMap.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/build.sbt
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/build.sbt b/tests/pio_tests/engines/recommendation-engine/build.sbt
new file mode 100644
index 0000000..c7413bb
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/build.sbt
@@ -0,0 +1,12 @@
+import AssemblyKeys._
+
+assemblySettings
+
+name := "template-scala-parallel-recommendation"
+
+organization := "org.apache.predictionio"
+
+libraryDependencies ++= Seq(
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.spark" %% "spark-core"    % "1.3.0" % "provided",
+  "org.apache.spark" %% "spark-mllib"   % "1.3.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/data/import_eventserver.py
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/data/import_eventserver.py b/tests/pio_tests/engines/recommendation-engine/data/import_eventserver.py
new file mode 100644
index 0000000..0a1e109
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/data/import_eventserver.py
@@ -0,0 +1,56 @@
+"""
+Import sample data for recommendation engine
+"""
+
+import predictionio
+import argparse
+import random
+
+RATE_ACTIONS_DELIMITER = "::"
+SEED = 3
+
+def import_events(client, file):
+  f = open(file, 'r')
+  random.seed(SEED)
+  count = 0
+  print "Importing data..."
+  for line in f:
+    data = line.rstrip('\r\n').split(RATE_ACTIONS_DELIMITER)
+    # For demonstration purpose, randomly mix in some buy events
+    if (random.randint(0, 1) == 1):
+      client.create_event(
+        event="rate",
+        entity_type="user",
+        entity_id=data[0],
+        target_entity_type="item",
+        target_entity_id=data[1],
+        properties= { "rating" : float(data[2]) }
+      )
+    else:
+      client.create_event(
+        event="buy",
+        entity_type="user",
+        entity_id=data[0],
+        target_entity_type="item",
+        target_entity_id=data[1]
+      )
+    count += 1
+  f.close()
+  print "%s events are imported." % count
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+    description="Import sample data for recommendation engine")
+  parser.add_argument('--access_key', default='invald_access_key')
+  parser.add_argument('--url', default="http://localhost:7070")
+  parser.add_argument('--file', default="./data/sample_movielens_data.txt")
+
+  args = parser.parse_args()
+  print args
+
+  client = predictionio.EventClient(
+    access_key=args.access_key,
+    url=args.url,
+    threads=5,
+    qsize=500)
+  import_events(client, args.file)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt b/tests/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
new file mode 100644
index 0000000..f0eee19
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
@@ -0,0 +1,1501 @@
+0::2::3
+0::3::1
+0::5::2
+0::9::4
+0::11::1
+0::12::2
+0::15::1
+0::17::1
+0::19::1
+0::21::1
+0::23::1
+0::26::3
+0::27::1
+0::28::1
+0::29::1
+0::30::1
+0::31::1
+0::34::1
+0::37::1
+0::41::2
+0::44::1
+0::45::2
+0::46::1
+0::47::1
+0::48::1
+0::50::1
+0::51::1
+0::54::1
+0::55::1
+0::59::2
+0::61::2
+0::64::1
+0::67::1
+0::68::1
+0::69::1
+0::71::1
+0::72::1
+0::77::2
+0::79::1
+0::83::1
+0::87::1
+0::89::2
+0::91::3
+0::92::4
+0::94::1
+0::95::2
+0::96::1
+0::98::1
+0::99::1
+1::2::2
+1::3::1
+1::4::2
+1::6::1
+1::9::3
+1::12::1
+1::13::1
+1::14::1
+1::16::1
+1::19::1
+1::21::3
+1::27::1
+1::28::3
+1::33::1
+1::36::2
+1::37::1
+1::40::1
+1::41::2
+1::43::1
+1::44::1
+1::47::1
+1::50::1
+1::54::1
+1::56::2
+1::57::1
+1::58::1
+1::60::1
+1::62::4
+1::63::1
+1::67::1
+1::68::4
+1::70::2
+1::72::1
+1::73::1
+1::74::2
+1::76::1
+1::77::3
+1::78::1
+1::81::1
+1::82::1
+1::85::3
+1::86::2
+1::88::2
+1::91::1
+1::92::2
+1::93::1
+1::94::2
+1::96::1
+1::97::1
+2::4::3
+2::6::1
+2::8::5
+2::9::1
+2::10::1
+2::12::3
+2::13::1
+2::15::2
+2::18::2
+2::19::4
+2::22::1
+2::26::1
+2::28::1
+2::34::4
+2::35::1
+2::37::5
+2::38::1
+2::39::5
+2::40::4
+2::47::1
+2::50::1
+2::52::2
+2::54::1
+2::55::1
+2::57::2
+2::58::2
+2::59::1
+2::61::1
+2::62::1
+2::64::1
+2::65::1
+2::66::3
+2::68::1
+2::71::3
+2::76::1
+2::77::1
+2::78::1
+2::80::1
+2::83::5
+2::85::1
+2::87::2
+2::88::1
+2::89::4
+2::90::1
+2::92::4
+2::93::5
+3::0::1
+3::1::1
+3::2::1
+3::7::3
+3::8::3
+3::9::1
+3::14::1
+3::15::1
+3::16::1
+3::18::4
+3::19::1
+3::24::3
+3::26::1
+3::29::3
+3::33::1
+3::34::3
+3::35::1
+3::36::3
+3::37::1
+3::38::2
+3::43::1
+3::44::1
+3::46::1
+3::47::1
+3::51::5
+3::52::3
+3::56::1
+3::58::1
+3::60::3
+3::62::1
+3::65::2
+3::66::1
+3::67::1
+3::68::2
+3::70::1
+3::72::2
+3::76::3
+3::79::3
+3::80::4
+3::81::1
+3::83::1
+3::84::1
+3::86::1
+3::87::2
+3::88::4
+3::89::1
+3::91::1
+3::94::3
+4::1::1
+4::6::1
+4::8::1
+4::9::1
+4::10::1
+4::11::1
+4::12::1
+4::13::1
+4::14::2
+4::15::1
+4::17::1
+4::20::1
+4::22::1
+4::23::1
+4::24::1
+4::29::4
+4::30::1
+4::31::1
+4::34::1
+4::35::1
+4::36::1
+4::39::2
+4::40::3
+4::41::4
+4::43::2
+4::44::1
+4::45::1
+4::46::1
+4::47::1
+4::49::2
+4::50::1
+4::51::1
+4::52::4
+4::54::1
+4::55::1
+4::60::3
+4::61::1
+4::62::4
+4::63::3
+4::65::1
+4::67::2
+4::69::1
+4::70::4
+4::71::1
+4::73::1
+4::78::1
+4::84::1
+4::85::1
+4::87::3
+4::88::3
+4::89::2
+4::96::1
+4::97::1
+4::98::1
+4::99::1
+5::0::1
+5::1::1
+5::4::1
+5::5::1
+5::8::1
+5::9::3
+5::10::2
+5::13::3
+5::15::1
+5::19::1
+5::20::3
+5::21::2
+5::23::3
+5::27::1
+5::28::1
+5::29::1
+5::31::1
+5::36::3
+5::38::2
+5::39::1
+5::42::1
+5::48::3
+5::49::4
+5::50::3
+5::51::1
+5::52::1
+5::54::1
+5::55::5
+5::56::3
+5::58::1
+5::60::1
+5::61::1
+5::64::3
+5::65::2
+5::68::4
+5::70::1
+5::71::1
+5::72::1
+5::74::1
+5::79::1
+5::81::2
+5::84::1
+5::85::1
+5::86::1
+5::88::1
+5::90::4
+5::91::2
+5::95::2
+5::99::1
+6::0::1
+6::1::1
+6::2::3
+6::5::1
+6::6::1
+6::9::1
+6::10::1
+6::15::2
+6::16::2
+6::17::1
+6::18::1
+6::20::1
+6::21::1
+6::22::1
+6::24::1
+6::25::5
+6::26::1
+6::28::1
+6::30::1
+6::33::1
+6::38::1
+6::39::1
+6::43::4
+6::44::1
+6::45::1
+6::48::1
+6::49::1
+6::50::1
+6::53::1
+6::54::1
+6::55::1
+6::56::1
+6::58::4
+6::59::1
+6::60::1
+6::61::3
+6::63::3
+6::66::1
+6::67::3
+6::68::1
+6::69::1
+6::71::2
+6::73::1
+6::75::1
+6::77::1
+6::79::1
+6::81::1
+6::84::1
+6::85::3
+6::86::1
+6::87::1
+6::88::1
+6::89::1
+6::91::2
+6::94::1
+6::95::2
+6::96::1
+7::1::1
+7::2::2
+7::3::1
+7::4::1
+7::7::1
+7::10::1
+7::11::2
+7::14::2
+7::15::1
+7::16::1
+7::18::1
+7::21::1
+7::22::1
+7::23::1
+7::25::5
+7::26::1
+7::29::4
+7::30::1
+7::31::3
+7::32::1
+7::33::1
+7::35::1
+7::37::2
+7::39::3
+7::40::2
+7::42::2
+7::44::1
+7::45::2
+7::47::4
+7::48::1
+7::49::1
+7::53::1
+7::54::1
+7::55::1
+7::56::1
+7::59::1
+7::61::2
+7::62::3
+7::63::2
+7::66::1
+7::67::3
+7::74::1
+7::75::1
+7::76::3
+7::77::1
+7::81::1
+7::82::1
+7::84::2
+7::85::4
+7::86::1
+7::92::2
+7::96::1
+7::97::1
+7::98::1
+8::0::1
+8::2::4
+8::3::2
+8::4::2
+8::5::1
+8::7::1
+8::9::1
+8::11::1
+8::15::1
+8::18::1
+8::19::1
+8::21::1
+8::29::5
+8::31::3
+8::33::1
+8::35::1
+8::36::1
+8::40::2
+8::44::1
+8::45::1
+8::50::1
+8::51::1
+8::52::5
+8::53::5
+8::54::1
+8::55::1
+8::56::1
+8::58::4
+8::60::3
+8::62::4
+8::64::1
+8::67::3
+8::69::1
+8::71::1
+8::72::3
+8::77::3
+8::78::1
+8::79::1
+8::83::1
+8::85::5
+8::86::1
+8::88::1
+8::90::1
+8::92::2
+8::95::4
+8::96::3
+8::97::1
+8::98::1
+8::99::1
+9::2::3
+9::3::1
+9::4::1
+9::5::1
+9::6::1
+9::7::5
+9::9::1
+9::12::1
+9::14::3
+9::15::1
+9::19::1
+9::21::1
+9::22::1
+9::24::1
+9::25::1
+9::26::1
+9::30::3
+9::32::4
+9::35::2
+9::36::2
+9::37::2
+9::38::1
+9::39::1
+9::43::3
+9::49::5
+9::50::3
+9::53::1
+9::54::1
+9::58::1
+9::59::1
+9::60::1
+9::61::1
+9::63::3
+9::64::3
+9::68::1
+9::69::1
+9::70::3
+9::71::1
+9::73::2
+9::75::1
+9::77::2
+9::81::2
+9::82::1
+9::83::1
+9::84::1
+9::86::1
+9::87::4
+9::88::1
+9::90::3
+9::94::2
+9::95::3
+9::97::2
+9::98::1
+10::0::3
+10::2::4
+10::4::3
+10::7::1
+10::8::1
+10::10::1
+10::13::2
+10::14::1
+10::16::2
+10::17::1
+10::18::1
+10::21::1
+10::22::1
+10::24::1
+10::25::3
+10::28::1
+10::35::1
+10::36::1
+10::37::1
+10::38::1
+10::39::1
+10::40::4
+10::41::2
+10::42::3
+10::43::1
+10::49::3
+10::50::1
+10::51::1
+10::52::1
+10::55::2
+10::56::1
+10::58::1
+10::63::1
+10::66::1
+10::67::2
+10::68::1
+10::75::1
+10::77::1
+10::79::1
+10::86::1
+10::89::3
+10::90::1
+10::97::1
+10::98::1
+11::0::1
+11::6::2
+11::9::1
+11::10::1
+11::11::1
+11::12::1
+11::13::4
+11::16::1
+11::18::5
+11::19::4
+11::20::1
+11::21::1
+11::22::1
+11::23::5
+11::25::1
+11::27::5
+11::30::5
+11::32::5
+11::35::3
+11::36::2
+11::37::2
+11::38::4
+11::39::1
+11::40::1
+11::41::1
+11::43::2
+11::45::1
+11::47::1
+11::48::5
+11::50::4
+11::51::3
+11::59::1
+11::61::1
+11::62::1
+11::64::1
+11::66::4
+11::67::1
+11::69::5
+11::70::1
+11::71::3
+11::72::3
+11::75::3
+11::76::1
+11::77::1
+11::78::1
+11::79::5
+11::80::3
+11::81::4
+11::82::1
+11::86::1
+11::88::1
+11::89::1
+11::90::4
+11::94::2
+11::97::3
+11::99::1
+12::2::1
+12::4::1
+12::6::1
+12::7::3
+12::8::1
+12::14::1
+12::15::2
+12::16::4
+12::17::5
+12::18::2
+12::21::1
+12::22::2
+12::23::3
+12::24::1
+12::25::1
+12::27::5
+12::30::2
+12::31::4
+12::35::5
+12::38::1
+12::41::1
+12::44::2
+12::45::1
+12::50::4
+12::51::1
+12::52::1
+12::53::1
+12::54::1
+12::56::2
+12::57::1
+12::60::1
+12::63::1
+12::64::5
+12::66::3
+12::67::1
+12::70::1
+12::72::1
+12::74::1
+12::75::1
+12::77::1
+12::78::1
+12::79::3
+12::82::2
+12::83::1
+12::84::1
+12::85::1
+12::86::1
+12::87::1
+12::88::1
+12::91::3
+12::92::1
+12::94::4
+12::95::2
+12::96::1
+12::98::2
+13::0::1
+13::3::1
+13::4::2
+13::5::1
+13::6::1
+13::12::1
+13::14::2
+13::15::1
+13::17::1
+13::18::3
+13::20::1
+13::21::1
+13::22::1
+13::26::1
+13::27::1
+13::29::3
+13::31::1
+13::33::1
+13::40::2
+13::43::2
+13::44::1
+13::45::1
+13::49::1
+13::51::1
+13::52::2
+13::53::3
+13::54::1
+13::62::1
+13::63::2
+13::64::1
+13::68::1
+13::71::1
+13::72::3
+13::73::1
+13::74::3
+13::77::2
+13::78::1
+13::79::2
+13::83::3
+13::85::1
+13::86::1
+13::87::2
+13::88::2
+13::90::1
+13::93::4
+13::94::1
+13::98::1
+13::99::1
+14::1::1
+14::3::3
+14::4::1
+14::5::1
+14::6::1
+14::7::1
+14::9::1
+14::10::1
+14::11::1
+14::12::1
+14::13::1
+14::14::3
+14::15::1
+14::16::1
+14::17::1
+14::20::1
+14::21::1
+14::24::1
+14::25::2
+14::27::1
+14::28::1
+14::29::5
+14::31::3
+14::34::1
+14::36::1
+14::37::2
+14::39::2
+14::40::1
+14::44::1
+14::45::1
+14::47::3
+14::48::1
+14::49::1
+14::51::1
+14::52::5
+14::53::3
+14::54::1
+14::55::1
+14::56::1
+14::62::4
+14::63::5
+14::67::3
+14::68::1
+14::69::3
+14::71::1
+14::72::4
+14::73::1
+14::76::5
+14::79::1
+14::82::1
+14::83::1
+14::88::1
+14::93::3
+14::94::1
+14::95::2
+14::96::4
+14::98::1
+15::0::1
+15::1::4
+15::2::1
+15::5::2
+15::6::1
+15::7::1
+15::13::1
+15::14::1
+15::15::1
+15::17::2
+15::19::2
+15::22::2
+15::23::2
+15::25::1
+15::26::3
+15::27::1
+15::28::2
+15::29::1
+15::32::1
+15::33::2
+15::34::1
+15::35::2
+15::36::1
+15::37::1
+15::39::1
+15::42::1
+15::46::5
+15::48::2
+15::50::2
+15::51::1
+15::52::1
+15::58::1
+15::62::1
+15::64::3
+15::65::2
+15::72::1
+15::73::1
+15::74::1
+15::79::1
+15::80::1
+15::81::1
+15::82::2
+15::85::1
+15::87::1
+15::91::2
+15::96::1
+15::97::1
+15::98::3
+16::2::1
+16::5::3
+16::6::2
+16::7::1
+16::9::1
+16::12::1
+16::14::1
+16::15::1
+16::19::1
+16::21::2
+16::29::4
+16::30::2
+16::32::1
+16::34::1
+16::36::1
+16::38::1
+16::46::1
+16::47::3
+16::48::1
+16::49::1
+16::50::1
+16::51::5
+16::54::5
+16::55::1
+16::56::2
+16::57::1
+16::60::1
+16::63::2
+16::65::1
+16::67::1
+16::72::1
+16::74::1
+16::80::1
+16::81::1
+16::82::1
+16::85::5
+16::86::1
+16::90::5
+16::91::1
+16::93::1
+16::94::3
+16::95::2
+16::96::3
+16::98::3
+16::99::1
+17::2::1
+17::3::1
+17::6::1
+17::10::4
+17::11::1
+17::13::2
+17::17::5
+17::19::1
+17::20::5
+17::22::4
+17::28::1
+17::29::1
+17::33::1
+17::34::1
+17::35::2
+17::37::1
+17::38::1
+17::45::1
+17::46::5
+17::47::1
+17::49::3
+17::51::1
+17::55::5
+17::56::3
+17::57::1
+17::58::1
+17::59::1
+17::60::1
+17::63::1
+17::66::1
+17::68::4
+17::69::1
+17::70::1
+17::72::1
+17::73::3
+17::78::1
+17::79::1
+17::82::2
+17::84::1
+17::90::5
+17::91::3
+17::92::1
+17::93::1
+17::94::4
+17::95::2
+17::97::1
+18::1::1
+18::4::3
+18::5::2
+18::6::1
+18::7::1
+18::10::1
+18::11::4
+18::12::2
+18::13::1
+18::15::1
+18::18::1
+18::20::1
+18::21::2
+18::22::1
+18::23::2
+18::25::1
+18::26::1
+18::27::1
+18::28::5
+18::29::1
+18::31::1
+18::32::1
+18::36::1
+18::38::5
+18::39::5
+18::40::1
+18::42::1
+18::43::1
+18::44::4
+18::46::1
+18::47::1
+18::48::1
+18::51::2
+18::55::1
+18::56::1
+18::57::1
+18::62::1
+18::63::1
+18::66::3
+18::67::1
+18::70::1
+18::75::1
+18::76::3
+18::77::1
+18::80::3
+18::81::3
+18::82::1
+18::83::5
+18::84::1
+18::97::1
+18::98::1
+18::99::2
+19::0::1
+19::1::1
+19::2::1
+19::4::1
+19::6::2
+19::11::1
+19::12::1
+19::14::1
+19::23::1
+19::26::1
+19::31::1
+19::32::4
+19::33::1
+19::34::1
+19::37::1
+19::38::1
+19::41::1
+19::43::1
+19::45::1
+19::48::1
+19::49::1
+19::50::2
+19::53::2
+19::54::3
+19::55::1
+19::56::2
+19::58::1
+19::61::1
+19::62::1
+19::63::1
+19::64::1
+19::65::1
+19::69::2
+19::72::1
+19::74::3
+19::76::1
+19::78::1
+19::79::1
+19::81::1
+19::82::1
+19::84::1
+19::86::1
+19::87::2
+19::90::4
+19::93::1
+19::94::4
+19::95::2
+19::96::1
+19::98::4
+20::0::1
+20::1::1
+20::2::2
+20::4::2
+20::6::1
+20::8::1
+20::12::1
+20::21::2
+20::22::5
+20::24::2
+20::25::1
+20::26::1
+20::29::2
+20::30::2
+20::32::2
+20::39::1
+20::40::1
+20::41::2
+20::45::2
+20::48::1
+20::50::1
+20::51::3
+20::53::3
+20::55::1
+20::57::2
+20::60::1
+20::61::1
+20::64::1
+20::66::1
+20::70::2
+20::72::1
+20::73::2
+20::75::4
+20::76::1
+20::77::4
+20::78::1
+20::79::1
+20::84::2
+20::85::2
+20::88::3
+20::89::1
+20::90::3
+20::91::1
+20::92::2
+20::93::1
+20::94::4
+20::97::1
+21::0::1
+21::2::4
+21::3::1
+21::7::2
+21::11::1
+21::12::1
+21::13::1
+21::14::3
+21::17::1
+21::19::1
+21::20::1
+21::21::1
+21::22::1
+21::23::1
+21::24::1
+21::27::1
+21::29::5
+21::30::2
+21::38::1
+21::40::2
+21::43::3
+21::44::1
+21::45::1
+21::46::1
+21::48::1
+21::51::1
+21::53::5
+21::54::1
+21::55::1
+21::56::1
+21::58::3
+21::59::3
+21::64::1
+21::66::1
+21::68::1
+21::71::1
+21::73::1
+21::74::4
+21::80::1
+21::81::1
+21::83::1
+21::84::1
+21::85::3
+21::87::4
+21::89::2
+21::92::2
+21::96::3
+21::99::1
+22::0::1
+22::3::2
+22::5::2
+22::6::2
+22::9::1
+22::10::1
+22::11::1
+22::13::1
+22::14::1
+22::16::1
+22::18::3
+22::19::1
+22::22::5
+22::25::1
+22::26::1
+22::29::3
+22::30::5
+22::32::4
+22::33::1
+22::35::1
+22::36::3
+22::37::1
+22::40::1
+22::41::3
+22::44::1
+22::45::2
+22::48::1
+22::51::5
+22::55::1
+22::56::2
+22::60::3
+22::61::1
+22::62::4
+22::63::1
+22::65::1
+22::66::1
+22::68::4
+22::69::4
+22::70::3
+22::71::1
+22::74::5
+22::75::5
+22::78::1
+22::80::3
+22::81::1
+22::82::1
+22::84::1
+22::86::1
+22::87::3
+22::88::5
+22::90::2
+22::92::3
+22::95::2
+22::96::2
+22::98::4
+22::99::1
+23::0::1
+23::2::1
+23::4::1
+23::6::2
+23::10::4
+23::12::1
+23::13::4
+23::14::1
+23::15::1
+23::18::4
+23::22::2
+23::23::4
+23::24::1
+23::25::1
+23::26::1
+23::27::5
+23::28::1
+23::29::1
+23::30::4
+23::32::5
+23::33::2
+23::36::3
+23::37::1
+23::38::1
+23::39::1
+23::43::1
+23::48::5
+23::49::5
+23::50::4
+23::53::1
+23::55::5
+23::57::1
+23::59::1
+23::60::1
+23::61::1
+23::64::4
+23::65::5
+23::66::2
+23::67::1
+23::68::3
+23::69::1
+23::72::1
+23::73::3
+23::77::1
+23::82::2
+23::83::1
+23::84::1
+23::85::1
+23::87::3
+23::88::1
+23::95::2
+23::97::1
+24::4::1
+24::6::3
+24::7::1
+24::10::2
+24::12::1
+24::15::1
+24::19::1
+24::24::1
+24::27::3
+24::30::5
+24::31::1
+24::32::3
+24::33::1
+24::37::1
+24::39::1
+24::40::1
+24::42::1
+24::43::3
+24::45::2
+24::46::1
+24::47::1
+24::48::1
+24::49::1
+24::50::1
+24::52::5
+24::57::1
+24::59::4
+24::63::4
+24::65::1
+24::66::1
+24::67::1
+24::68::3
+24::69::5
+24::71::1
+24::72::4
+24::77::4
+24::78::1
+24::80::1
+24::82::1
+24::84::1
+24::86::1
+24::87::1
+24::88::2
+24::89::1
+24::90::5
+24::91::1
+24::92::1
+24::94::2
+24::95::1
+24::96::5
+24::98::1
+24::99::1
+25::1::3
+25::2::1
+25::7::1
+25::9::1
+25::12::3
+25::16::3
+25::17::1
+25::18::1
+25::20::1
+25::22::1
+25::23::1
+25::26::2
+25::29::1
+25::30::1
+25::31::2
+25::33::4
+25::34::3
+25::35::2
+25::36::1
+25::37::1
+25::40::1
+25::41::1
+25::43::1
+25::47::4
+25::50::1
+25::51::1
+25::53::1
+25::56::1
+25::58::2
+25::64::2
+25::67::2
+25::68::1
+25::70::1
+25::71::4
+25::73::1
+25::74::1
+25::76::1
+25::79::1
+25::82::1
+25::84::2
+25::85::1
+25::91::3
+25::92::1
+25::94::1
+25::95::1
+25::97::2
+26::0::1
+26::1::1
+26::2::1
+26::3::1
+26::4::4
+26::5::2
+26::6::3
+26::7::5
+26::13::3
+26::14::1
+26::16::1
+26::18::3
+26::20::1
+26::21::3
+26::22::5
+26::23::5
+26::24::5
+26::27::1
+26::31::1
+26::35::1
+26::36::4
+26::40::1
+26::44::1
+26::45::2
+26::47::1
+26::48::1
+26::49::3
+26::50::2
+26::52::1
+26::54::4
+26::55::1
+26::57::3
+26::58::1
+26::61::1
+26::62::2
+26::66::1
+26::68::4
+26::71::1
+26::73::4
+26::76::1
+26::81::3
+26::85::1
+26::86::3
+26::88::5
+26::91::1
+26::94::5
+26::95::1
+26::96::1
+26::97::1
+27::0::1
+27::9::1
+27::10::1
+27::18::4
+27::19::3
+27::20::1
+27::22::2
+27::24::2
+27::25::1
+27::27::3
+27::28::1
+27::29::1
+27::31::1
+27::33::3
+27::40::1
+27::42::1
+27::43::1
+27::44::3
+27::45::1
+27::51::3
+27::52::1
+27::55::3
+27::57::1
+27::59::1
+27::60::1
+27::61::1
+27::64::1
+27::66::3
+27::68::1
+27::70::1
+27::71::2
+27::72::1
+27::75::3
+27::78::1
+27::80::3
+27::82::1
+27::83::3
+27::86::1
+27::87::2
+27::90::1
+27::91::1
+27::92::1
+27::93::1
+27::94::2
+27::95::1
+27::98::1
+28::0::3
+28::1::1
+28::2::4
+28::3::1
+28::6::1
+28::7::1
+28::12::5
+28::13::2
+28::14::1
+28::15::1
+28::17::1
+28::19::3
+28::20::1
+28::23::3
+28::24::3
+28::27::1
+28::29::1
+28::33::1
+28::34::1
+28::36::1
+28::38::2
+28::39::2
+28::44::1
+28::45::1
+28::49::4
+28::50::1
+28::52::1
+28::54::1
+28::56::1
+28::57::3
+28::58::1
+28::59::1
+28::60::1
+28::62::3
+28::63::1
+28::65::1
+28::75::1
+28::78::1
+28::81::5
+28::82::4
+28::83::1
+28::85::1
+28::88::2
+28::89::4
+28::90::1
+28::92::5
+28::94::1
+28::95::2
+28::98::1
+28::99::1
+29::3::1
+29::4::1
+29::5::1
+29::7::2
+29::9::1
+29::10::3
+29::11::1
+29::13::3
+29::14::1
+29::15::1
+29::17::3
+29::19::3
+29::22::3
+29::23::4
+29::25::1
+29::29::1
+29::31::1
+29::32::4
+29::33::2
+29::36::2
+29::38::3
+29::39::1
+29::42::1
+29::46::5
+29::49::3
+29::51::2
+29::59::1
+29::61::1
+29::62::1
+29::67::1
+29::68::3
+29::69::1
+29::70::1
+29::74::1
+29::75::1
+29::79::2
+29::80::1
+29::81::2
+29::83::1
+29::85::1
+29::86::1
+29::90::4
+29::93::1
+29::94::4
+29::97::1
+29::99::1

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/data/send_query.py
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/data/send_query.py b/tests/pio_tests/engines/recommendation-engine/data/send_query.py
new file mode 100644
index 0000000..7eaf53e
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/data/send_query.py
@@ -0,0 +1,7 @@
+"""
+Send sample query to prediction engine
+"""
+
+import predictionio
+engine_client = predictionio.EngineClient(url="http://localhost:8000")
+print engine_client.send_query({"user": "1", "num": 4})

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/engine.json
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/engine.json b/tests/pio_tests/engines/recommendation-engine/engine.json
new file mode 100644
index 0000000..8d53d56
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/engine.json
@@ -0,0 +1,21 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.template.recommendation.RecommendationEngine",
+  "datasource": {
+    "params" : {
+      "appName": "MyApp1"
+    }
+  },
+  "algorithms": [
+    {
+      "name": "als",
+      "params": {
+        "rank": 10,
+        "numIterations": 10,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/manifest.json
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/manifest.json b/tests/pio_tests/engines/recommendation-engine/manifest.json
new file mode 100644
index 0000000..9c545ce
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/manifest.json
@@ -0,0 +1 @@
+{"id":"yDBpzjz39AjIxlOAh8W4t3QSc75uPCuJ","version":"ee98ff9c009ef0d9fb1bc6b78750b83a0ceb37b2","name":"recommendation-engine","description":"pio-autogen-manifest","files":[],"engineFactory":""}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/project/assembly.sbt
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/project/assembly.sbt b/tests/pio_tests/engines/recommendation-engine/project/assembly.sbt
new file mode 100644
index 0000000..54c3252
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/project/assembly.sbt
@@ -0,0 +1 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/5320724a/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt b/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt
new file mode 100644
index 0000000..8346a96
--- /dev/null
+++ b/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt
@@ -0,0 +1 @@
+addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")



Mime
View raw message