zeppelin-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From zjf...@apache.org
Subject zeppelin git commit: [HOTFIX] Add livy build in .travis
Date Fri, 28 Apr 2017 00:05:18 GMT
Repository: zeppelin
Updated Branches:
  refs/heads/branch-0.7 c4cf06869 -> b9bc2b810


[HOTFIX] Add livy build in .travis

The livy integration test is currently ignored; this PR adds the livy integration test to the travis build.

[Hot Fix]

* [ ] - Task

* No jira created

CI pass

* Do the license files need updating? No
* Is there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <zjffdu@apache.org>

Closes #2279 from zjffdu/hotfix_livy and squashes the following commits:

674c987 [Jeff Zhang] [HOTFIX] Add livy build in .travis

(cherry picked from commit 0f1701da8bb98678ae8e486796b2c1e650125106)
Signed-off-by: Jeff Zhang <zjffdu@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/b9bc2b81
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/b9bc2b81
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/b9bc2b81

Branch: refs/heads/branch-0.7
Commit: b9bc2b810dea27fcef44062f56c4446eacb70b64
Parents: c4cf068
Author: Jeff Zhang <zjffdu@apache.org>
Authored: Fri Apr 21 17:38:51 2017 +0800
Committer: Jeff Zhang <zjffdu@apache.org>
Committed: Fri Apr 28 08:05:04 2017 +0800

----------------------------------------------------------------------
 .travis.yml                                          | 15 ++++++---------
 .../org/apache/zeppelin/livy/LivyInterpreterIT.java  |  8 ++++++--
 2 files changed, 12 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b9bc2b81/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index a5a9acb..c7e97d8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -73,18 +73,15 @@ matrix:
     - jdk: "oraclejdk7"
       env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6
-Ppyspark -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test
-DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark"
TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
 
-    # Test python/pyspark with python 2
-    - jdk: "oraclejdk7"
-      env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.6" PROFILE="-Pspark-1.6
-Phadoop-2.6 -Ppyspark" BUILD_FLAG="package -am -DskipTests -DskipRat" TEST_FLAG="test -DskipRat"
MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.*
-Dpyspark.test.exclude='' -DfailIfNoTests=false"
-
-    # Test python/pyspark with python 3
-    - jdk: "oraclejdk7"
-      env: PYTHON="3" SCALA_VER="2.11" SPARK_VER="2.0.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.0
-Phadoop-2.6 -Ppyspark -Pscala-2.11" BUILD_FLAG="package -am -DskipTests -DskipRat" TEST_FLAG="test
-DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python"
TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.*
-Dpyspark.test.exclude='' -DfailIfNoTests=false"
+    # Test python/pyspark with python 2, livy 0.2
+    - sudo: required
+      jdk: "oraclejdk7"
+      env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.6" LIVY_VER="0.2.0"
PROFILE="-Pspark-1.6 -Phadoop-2.6 -Plivy-0.2" BUILD_FLAG="package -am -DskipTests -DskipRat"
TEST_FLAG="verify -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python,livy"
TEST_PROJECTS="-Dtest=LivySQLInterpreterTest,org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.*
-Dpyspark.test.exclude='' -DfailIfNoTests=false"
 
-    # Test livy with spark 1.5.2 and hadoop 2.6
+    # Test python/pyspark with python 3, livy 0.3
     - sudo: required
       jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" $LIVY_VER="0.2.0" SPARK_VER="1.5.2" HADOOP_VER="2.6" PROFILE="-Pspark-1.5
-Phadoop-2.6" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl
zeppelin-interpreter,livy" TEST_PROJECTS="-DfailIfNoTests=false"
+      env: PYTHON="3" SCALA_VER="2.11" SPARK_VER="2.0.0" HADOOP_VER="2.6" LIVY_VER="0.3.0"
PROFILE="-Pspark-2.0 -Phadoop-2.6 -Pscala-2.11 -Plivy-0.3" BUILD_FLAG="package -am -DskipTests
-DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python,livy"
TEST_PROJECTS="-Dtest=LivySQLInterpreterTest,org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.*
-Dpyspark.test.exclude='' -DfailIfNoTests=false"
 
 before_install:
   # check files included in commit range, clear bower_components if a bower.json file has
changed.

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/b9bc2b81/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
----------------------------------------------------------------------
diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
index c8f355c..aec2742 100644
--- a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
+++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
@@ -318,13 +318,17 @@ public class LivyInterpreterIT {
             + "df.collect()", context);
         assertEquals(InterpreterResult.Code.SUCCESS, result.code());
         assertEquals(1, result.message().size());
-        assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]"));
+        //python2 has u and python3 don't have u
+        assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]")
+            || result.message().get(0).getData().contains("[Row(_1='hello', _2=20)]"));
       } else {
         result = pysparkInterpreter.interpret("df=spark.createDataFrame([(\"hello\",20)])\n"
             + "df.collect()", context);
         assertEquals(InterpreterResult.Code.SUCCESS, result.code());
         assertEquals(1, result.message().size());
-        assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]"));
+        //python2 has u and python3 don't have u
+        assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]")
+            || result.message().get(0).getData().contains("[Row(_1='hello', _2=20)]"));
       }
 
       // test magic api


Mime
View raw message