predictionio-commits mailing list archives

From: don...@apache.org
Subject: [30/50] [abbrv] incubator-predictionio git commit: [PIO-30] Set up a cross build for Spark 2.0 and Scala 2.11
Date: Mon, 24 Apr 2017 17:34:15 GMT
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/docker-files/env-conf/pio-env.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/env-conf/pio-env.sh b/tests/docker-files/env-conf/pio-env.sh
index 0acf3a7..16f1fef 100644
--- a/tests/docker-files/env-conf/pio-env.sh
+++ b/tests/docker-files/env-conf/pio-env.sh
@@ -24,9 +24,9 @@
 # you need to change these to fit your site.
 
 # SPARK_HOME: Apache Spark is a hard dependency and must be configured.
-SPARK_HOME=$SPARK_HOME
+# SPARK_HOME=$SPARK_HOME
 
-POSTGRES_JDBC_DRIVER=/drivers/postgresql-9.4-1204.jdbc41.jar
+POSTGRES_JDBC_DRIVER=/drivers/$PGSQL_JAR
 MYSQL_JDBC_DRIVER=
 
 # ES_CONF_DIR: You must configure this if you have advanced configuration for
@@ -88,7 +88,16 @@ PIO_STORAGE_SOURCES_ELASTICSEARCH_TYPE=elasticsearch
 #PIO_STORAGE_SOURCES_ELASTICSEARCH_CLUSTERNAME=pio
 PIO_STORAGE_SOURCES_ELASTICSEARCH_HOSTS=elasticsearch
 PIO_STORAGE_SOURCES_ELASTICSEARCH_SCHEMES=http
-PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9200
+if [ ! -z "$PIO_ELASTICSEARCH_VERSION" ]; then
+    ES_MAJOR=`echo $PIO_ELASTICSEARCH_VERSION | awk -F. '{print $1}'`
+else
+    ES_MAJOR=1
+fi
+if [ "$ES_MAJOR" = "1" ]; then
+    PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9300
+else
+    PIO_STORAGE_SOURCES_ELASTICSEARCH_PORTS=9200
+fi
 #PIO_STORAGE_SOURCES_ELASTICSEARCH_HOME=$ELASTICSEARCH_HOME
 
 # Local File System Example

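Note: the added block derives the Elasticsearch port from the major version,
because the ES 1.x storage driver connects over the native transport protocol
(port 9300) while later clients use the REST API (port 9200). A minimal
standalone sketch of the same switch, with an illustrative version string:

    # Derive the ES port from a version string such as "1.7.3" or "5.6.9".
    # The PIO_ELASTICSEARCH_VERSION value below is illustrative.
    PIO_ELASTICSEARCH_VERSION=5.6.9
    ES_MAJOR=${PIO_ELASTICSEARCH_VERSION%%.*}   # POSIX equivalent of the awk call
    ES_MAJOR=${ES_MAJOR:-1}                     # default to 1.x when unset
    if [ "$ES_MAJOR" = "1" ]; then
        ES_PORT=9300   # native transport, ES 1.x
    else
        ES_PORT=9200   # HTTP/REST, ES 5.x and later
    fi
    echo "Elasticsearch port: $ES_PORT"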
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/docker-files/init.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/init.sh b/tests/docker-files/init.sh
index 8dc08ea..fc12ffe 100755
--- a/tests/docker-files/init.sh
+++ b/tests/docker-files/init.sh
@@ -17,7 +17,7 @@
 #
 
 set -e
-export PYTHONPATH=/$PIO_HOME/tests:$PYTHONPATH
+export PYTHONPATH=$PIO_HOME/tests:$PYTHONPATH
 echo "Sleeping $SLEEP_TIME seconds for all services to be ready..."
 sleep $SLEEP_TIME
 eval $@

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/docker-files/set_build_profile.sh
----------------------------------------------------------------------
diff --git a/tests/docker-files/set_build_profile.sh b/tests/docker-files/set_build_profile.sh
new file mode 100755
index 0000000..141dd46
--- /dev/null
+++ b/tests/docker-files/set_build_profile.sh
@@ -0,0 +1,31 @@
+#!/bin/bash -
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Sets versions of profile dependencies from the sbt configuration.
+# e.g. run `source ./set_build_profile.sh scala-2.11`
+
+set -e
+
+if [[ "$#" -ne 1 ]]; then
+  echo "Usage: set-build-profile.sh <build-profile>"
+  exit 1
+fi
+
+set -a
+eval `$PIO_HOME/sbt/sbt --error 'set showSuccess := false' -Dbuild.profile=$1 printProfile | grep '.*_VERSION=.*'`
+set +a

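Note: this relies on a printProfile sbt task (presumably defined elsewhere in
this change) that prints one NAME_VERSION=value pair per line; `set -a` marks
everything the eval assigns for export. A sketch of the intended use, with
illustrative output values:

    # Assumed printProfile output for the scala-2.11 profile (illustrative):
    #   PIO_SCALA_VERSION=2.11.8
    #   PIO_SPARK_VERSION=2.0.2
    #   PIO_HADOOP_VERSION=2.7.3
    #   PIO_ELASTICSEARCH_VERSION=5.6.9
    source ./set_build_profile.sh scala-2.11
    echo "$PIO_SPARK_VERSION"   # visible to the caller because the script is sourced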
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/pio_tests/engines/recommendation-engine/build.sbt
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/build.sbt b/tests/pio_tests/engines/recommendation-engine/build.sbt
index 52e8742..13fe354 100644
--- a/tests/pio_tests/engines/recommendation-engine/build.sbt
+++ b/tests/pio_tests/engines/recommendation-engine/build.sbt
@@ -19,11 +19,13 @@ import AssemblyKeys._
 
 assemblySettings
 
+scalaVersion in ThisBuild := sys.env.getOrElse("PIO_SCALA_VERSION", "2.10.5")
+
 name := "template-scala-parallel-recommendation"
 
 organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "org.apache.predictionio" %% "apache-predictionio-core" % pioVersion.value % "provided",
-  "org.apache.spark"        %% "spark-core"        % "1.3.0" % "provided",
-  "org.apache.spark"        %% "spark-mllib"       % "1.3.0" % "provided")
+  "org.apache.predictionio" %% "apache-predictionio-core" % "0.11.0-SNAPSHOT" % "provided",
+  "org.apache.spark"        %% "spark-core"    % sys.env.getOrElse("PIO_SPARK_VERSION", "1.6.3")
% "provided",
+  "org.apache.spark"        %% "spark-mllib"   % sys.env.getOrElse("PIO_SPARK_VERSION", "1.6.3")
% "provided")

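Note: reading PIO_SCALA_VERSION and PIO_SPARK_VERSION from the environment
lets a single engine source tree build against either profile, falling back
to Scala 2.10.5 / Spark 1.6.3 when the variables are unset. A sketch of
driving the cross build (version values are illustrative):

    # Build the test engine against the Spark 2.0 / Scala 2.11 profile:
    export PIO_SCALA_VERSION=2.11.8
    export PIO_SPARK_VERSION=2.0.2
    sbt clean assembly   # with both unset, builds for Scala 2.10.5 / Spark 1.6.3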
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/pio_tests/engines/recommendation-engine/manifest.json
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/manifest.json b/tests/pio_tests/engines/recommendation-engine/manifest.json
deleted file mode 100644
index 9c545ce..0000000
--- a/tests/pio_tests/engines/recommendation-engine/manifest.json
+++ /dev/null
@@ -1 +0,0 @@
-{"id":"yDBpzjz39AjIxlOAh8W4t3QSc75uPCuJ","version":"ee98ff9c009ef0d9fb1bc6b78750b83a0ceb37b2","name":"recommendation-engine","description":"pio-autogen-manifest","files":[],"engineFactory":""}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt b/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt
deleted file mode 100644
index 8346a96..0000000
--- a/tests/pio_tests/engines/recommendation-engine/project/pio-build.sbt
+++ /dev/null
@@ -1 +0,0 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/pio_tests/scenarios/quickstart_test.py
----------------------------------------------------------------------
diff --git a/tests/pio_tests/scenarios/quickstart_test.py b/tests/pio_tests/scenarios/quickstart_test.py
index 1c5f422..ab7180d 100644
--- a/tests/pio_tests/scenarios/quickstart_test.py
+++ b/tests/pio_tests/scenarios/quickstart_test.py
@@ -89,8 +89,8 @@ class QuickStartTest(BaseTestCase):
     self.app.build(engine_dir=engine_path)
     self.log.info("Training...")
     self.app.train(engine_dir=engine_path)
-    self.log.info("Deploying and waiting 15s for it to start...")
-    self.app.deploy(wait_time=15, engine_dir=engine_path)
+    self.log.info("Deploying and waiting 30s for it to start...")
+    self.app.deploy(wait_time=30, engine_dir=engine_path)
 
     self.log.info("Sending a single query and checking results")
     user_query = { "user": 1, "num": 4 }
@@ -153,8 +153,8 @@ class QuickStartTest(BaseTestCase):
     self.app.build()
     self.log.info("Training...")
     self.app.train()
-    self.log.info("Deploying and waiting 15s for it to start...")
-    self.app.deploy(wait_time=15)
+    self.log.info("Deploying and waiting 35s for it to start...")
+    self.app.deploy(wait_time=35)
 
     self.log.info("Testing pio commands outside of engine directory")
     self.engine_dir_test()

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/pio_tests/utils.py
----------------------------------------------------------------------
diff --git a/tests/pio_tests/utils.py b/tests/pio_tests/utils.py
index e6c5b0b..05c8d1c 100644
--- a/tests/pio_tests/utils.py
+++ b/tests/pio_tests/utils.py
@@ -151,10 +151,11 @@ def import_events_batch(events, test_context, appid, channel=None):
   try:
       with open(file_path, 'w') as f:
           f.write(contents)
-      srun('pio import --appid {} --input {} {}'.format(
+      srun('pio import --appid {} --input {} {} -- {}'.format(
           appid,
           file_path,
-          '--channel {}'.format(channel) if channel else ''))
+          '--channel {}'.format(channel) if channel else '',
+          '--conf spark.sql.warehouse.dir=file:///tmp/spark-warehouse'))
   finally:
       os.remove(file_path)
 

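Note: arguments after `--` in a pio command are passed through to
spark-submit; pinning spark.sql.warehouse.dir avoids Spark 2.0 trying to
create its default warehouse directory inside the container. The generated
command looks roughly like this (app id and input path are illustrative):

    pio import --appid 1 --input /tmp/events.json \
      -- --conf spark.sql.warehouse.dir=file:///tmp/spark-warehouse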
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/run_docker.sh
----------------------------------------------------------------------
diff --git a/tests/run_docker.sh b/tests/run_docker.sh
index 6257fa2..ad7e189 100755
--- a/tests/run_docker.sh
+++ b/tests/run_docker.sh
@@ -46,17 +46,10 @@ shift
 
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 
-if [ "$ES_VERSION" = "1" ]; then
-    docker-compose -f $DIR/docker-compose-es1.yml run \
-      -e PIO_STORAGE_REPOSITORIES_METADATA_SOURCE=$META \
-      -e PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE=$EVENT \
-      -e PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE=$MODEL \
-      pio-testing $@
-else
-    docker-compose -f $DIR/docker-compose.yml run \
-      -e PIO_STORAGE_REPOSITORIES_METADATA_SOURCE=$META \
-      -e PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE=$EVENT \
-      -e PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE=$MODEL \
-      pio-testing $@
-fi
+source $DIR/../conf/pio-vendors.sh
 
+docker-compose -f $DIR/docker-compose.yml run \
+  -e PIO_STORAGE_REPOSITORIES_METADATA_SOURCE=$META \
+  -e PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE=$EVENT \
+  -e PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE=$MODEL \
+  pio-testing $@

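Note: the ES1-specific compose file is gone; one docker-compose.yml now
covers both profiles, with conf/pio-vendors.sh supplying the version
variables. Invocation is unchanged, for example (repository arguments are
illustrative):

    # Metadata/event/model storage backends, then the command to run:
    tests/run_docker.sh PGSQL PGSQL PGSQL "/PredictionIO/tests/unit.sh"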
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/script.travis.sh
----------------------------------------------------------------------
diff --git a/tests/script.travis.sh b/tests/script.travis.sh
index db69413..1d4985e 100755
--- a/tests/script.travis.sh
+++ b/tests/script.travis.sh
@@ -17,9 +17,9 @@
 #
 
 if [[ $BUILD_TYPE == Unit ]]; then
-  ./tests/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \
-    /PredictionIO/tests/unit.sh
+  tests/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \
+    "/PredictionIO/tests/unit.sh"
 else
-  ./tests/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \
-    python3 /PredictionIO/tests/pio_tests/tests.py
+  tests/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \
+    "python3 /PredictionIO/tests/pio_tests/tests.py"
 fi

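Note: quoting keeps each command a single positional parameter on its way to
the container entrypoint, where init.sh (see above) finally runs `eval $@`.
A simplified sketch of the call chain:

    tests/run_docker.sh $METADATA_REP $EVENTDATA_REP $MODELDATA_REP \
      "python3 /PredictionIO/tests/pio_tests/tests.py"
    #  -> docker-compose run ... pio-testing "python3 .../tests.py"
    #  -> init.sh: eval $@   # executes: python3 .../tests.py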
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tests/unit.sh
----------------------------------------------------------------------
diff --git a/tests/unit.sh b/tests/unit.sh
index 6382a70..1421dce 100755
--- a/tests/unit.sh
+++ b/tests/unit.sh
@@ -14,20 +14,31 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
 
-# Run license check
 pushd /PredictionIO
 
+# Run license check
 ./tests/check_license.sh
 
 # Prepare pio environment variables
 set -a
-source conf/pio-env.sh
+source ./conf/pio-env.sh
 set +a
+source ./conf/pio-vendors.sh
 
 # Run stylecheck
-sbt/sbt scalastyle
+sbt/sbt scalastyle \
+    -Dscala.version=$PIO_SCALA_VERSION \
+    -Dspark.version=$PIO_SPARK_VERSION \
+    -Dhadoop.version=$PIO_HADOOP_VERSION \
+    -Delasticsearch.version=$PIO_ELASTICSEARCH_VERSION
+
 # Run all unit tests
-sbt/sbt test
+sbt/sbt dataJdbc/compile test storage/test \
+    -Dscala.version=$PIO_SCALA_VERSION \
+    -Dspark.version=$PIO_SPARK_VERSION \
+    -Dhadoop.version=$PIO_HADOOP_VERSION \
+    -Delasticsearch.version=$PIO_ELASTICSEARCH_VERSION
 
 popd

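Note: with conf/pio-vendors.sh sourced, each sbt run pins the profile via -D
system properties, so both the style check and the unit tests compile against
the selected Scala/Spark/Hadoop/Elasticsearch combination. Expanded by hand,
the test invocation is equivalent to (version values are illustrative):

    sbt/sbt dataJdbc/compile test storage/test \
        -Dscala.version=2.11.8 \
        -Dspark.version=2.0.2 \
        -Dhadoop.version=2.7.3 \
        -Delasticsearch.version=5.6.9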
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tools/build.sbt
----------------------------------------------------------------------
diff --git a/tools/build.sbt b/tools/build.sbt
index 57e7d96..483a591 100644
--- a/tools/build.sbt
+++ b/tools/build.sbt
@@ -15,27 +15,17 @@
  * limitations under the License.
  */
 
+import PIOBuild._
 import sbtassembly.AssemblyPlugin.autoImport._
 
 name := "apache-predictionio-tools"
 
 libraryDependencies ++= Seq(
-  "com.github.scopt"       %% "scopt"          % "3.2.0",
-  "io.spray"               %% "spray-can"      % "1.3.3",
-  "io.spray"               %% "spray-routing"  % "1.3.3",
   "me.lessis"               % "semverfi_2.10"  % "0.1.3",
-  "org.apache.hadoop"       % "hadoop-common"  % "2.6.2",
-  "org.apache.hadoop"       % "hadoop-hdfs"    % "2.6.2",
-  "org.apache.spark"       %% "spark-core"     % sparkVersion.value % "provided",
   "org.apache.spark"       %% "spark-sql"      % sparkVersion.value % "provided",
-  "org.clapper"            %% "grizzled-slf4j" % "1.0.2",
-  "org.json4s"             %% "json4s-native"  % json4sVersion.value,
-  "org.json4s"             %% "json4s-ext"     % json4sVersion.value,
-  "org.scalaj"             %% "scalaj-http"    % "1.1.6",
-  "org.spark-project.akka" %% "akka-actor"     % "2.3.4-spark",
+  "com.typesafe.akka"      %% "akka-slf4j"     % akkaVersion.value,
   "io.spray"               %% "spray-testkit"  % "1.3.3" % "test",
-  "org.specs2"             %% "specs2"         % "2.3.13" % "test",
-  "org.spark-project.akka" %% "akka-slf4j"     % "2.3.4-spark")
+  "org.specs2"             %% "specs2"         % "2.3.13" % "test")
 
 dependencyOverrides +=   "org.slf4j" % "slf4j-log4j12" % "1.7.18"
 
@@ -49,12 +39,8 @@ assemblyMergeStrategy in assembly := {
 
 excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
   cp filter { _.data.getName match {
-    case "asm-3.1.jar" => true
-    case "commons-beanutils-1.7.0.jar" => true
     case "reflectasm-1.10.1.jar" => true
-    case "commons-beanutils-core-1.8.0.jar" => true
     case "kryo-3.0.3.jar" => true
-    case "slf4j-log4j12-1.7.5.jar" => true
     case _ => false
   }}
 }

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala b/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala
index bbe39a5..7e8fd30 100644
--- a/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala
+++ b/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala
@@ -34,6 +34,7 @@ import spray.httpx.Json4sSupport
 import spray.routing._
 
 import scala.concurrent.ExecutionContext
+import scala.concurrent.duration.Duration
 
 class AdminServiceActor(val commandClient: CommandClient)
   extends HttpServiceActor {
@@ -151,7 +152,7 @@ object AdminServer {
 }
 
 object AdminRun {
-  def main (args: Array[String]) {
+  def main (args: Array[String]) : Unit = {
     AdminServer.createAdminServer(AdminServerConfig(
       ip = "localhost",
       port = 7071))

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/00779c3d/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala b/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala
index de09cab..c101d3f 100644
--- a/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala
+++ b/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala
@@ -21,13 +21,13 @@ package org.apache.predictionio.tools.export
 import org.apache.predictionio.controller.Utils
 import org.apache.predictionio.data.storage.EventJson4sSupport
 import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.data.SparkVersionDependent
 import org.apache.predictionio.tools.Runner
 import org.apache.predictionio.workflow.WorkflowContext
 import org.apache.predictionio.workflow.WorkflowUtils
 
 import grizzled.slf4j.Logging
 import org.apache.spark.sql.SaveMode
-import org.apache.spark.sql.SQLContext
 import org.json4s.native.Serialization._
 
 case class EventsToFileArgs(
@@ -91,14 +91,14 @@ object EventsToFile extends Logging {
         mode = "Export",
         batch = "App ID " + args.appId + channelStr,
         executorEnv = Runner.envStringToMap(args.env))
-      val sqlContext = new SQLContext(sc)
+      val sqlSession = SparkVersionDependent.sqlSession(sc)
       val events = Storage.getPEvents()
       val eventsRdd = events.find(appId = args.appId, channelId = channelId)(sc)
       val jsonStringRdd = eventsRdd.map(write(_))
       if (args.format == "json") {
         jsonStringRdd.saveAsTextFile(args.outputPath)
       } else {
-        val jsonDf = sqlContext.read.json(jsonStringRdd)
+        val jsonDf = sqlSession.read.json(jsonStringRdd)
         jsonDf.write.mode(SaveMode.ErrorIfExists).parquet(args.outputPath)
       }
       info(s"Events are exported to ${args.outputPath}/.")

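Note: SparkVersionDependent.sqlSession hides the difference between the
Spark 1.x SQLContext and the Spark 2.x SparkSession; both expose the
read.json(...) entry point used here, so the export CLI is unchanged, e.g.
(app id and output path are illustrative):

    pio export --appid 1 --output /tmp/exported-events --format parquet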
