zeppelin-commits mailing list archives

From m...@apache.org
Subject incubator-zeppelin git commit: Improve travis build script
Date Wed, 13 Jan 2016 00:09:59 GMT
Repository: incubator-zeppelin
Updated Branches:
  refs/heads/master 3d4dc4366 -> 0c42f4332


Improve travis build script

### What is this PR for?
Recent Travis CI builds are failing with:

```
The log length has exceeded the limit of 4 Megabytes (this usually means that test suite is raising the same exception over and over).

The build has been terminated.
```
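Splitting the single long-running build into a Travis build matrix gives each Spark version its own job and its own log, so every job stays well under the 4 MB cap. A condensed sketch of the matrix this commit introduces (the full version in the diff below covers Spark 1.1 through 1.6):

```
matrix:
  include:
    # One job builds and verifies all modules against Spark 1.6
    - jdk: "oraclejdk7"
      env: SPARK_VER="1.6.0" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark -Pscalding" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr"
    # Further jobs test the spark module against older Spark releases
    - jdk: "oraclejdk7"
      env: SPARK_VER="1.5.2" HADOOP_VER="2.3" PROFILE="-Pspark-1.5 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify"
```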

### What type of PR is it?
Improvement

### Todos
* [x] - Update .travis.yml to use build matrix
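With the matrix carrying SPARK_VER, HADOOP_VER, PROFILE, BUILD_FLAG and TEST_FLAG, the per-version build steps reduce to one parameterized sequence shared by every job; this mirrors the new install/before_script/script/after_script sections in the diff below:

```
install:
  - mvn $BUILD_FLAG $PROFILE -B

before_script:
  - ./testing/startSparkCluster.sh $SPARK_VER $HADOOP_VER
  - echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh

script:
  - mvn $TEST_FLAG $PROFILE -B

after_script:
  - ./testing/stopSparkCluster.sh $SPARK_VER $HADOOP_VER
```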

### Is there a relevant Jira issue?

### How should this be tested?

### Screenshots (if appropriate)

### Questions:
* Do the license files need an update? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Lee moon soo <moon@apache.org>

Closes #626 from Leemoonsoo/build_matrix and squashes the following commits:

c13086a [Lee moon soo] Remove jdk8
193dcd2 [Lee moon soo] Add scalding profile
a1ce0d7 [Lee moon soo] Enable test for other spark versions
8e0e94f [Lee moon soo] Another try
f8f69ae [Lee moon soo] try in a different way
3e523c5 [Lee moon soo] update spark module test
6953278 [Lee moon soo] Update
5615eb9 [Lee moon soo] use build matrix


Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/0c42f433
Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/0c42f433
Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/0c42f433

Branch: refs/heads/master
Commit: 0c42f4332ee5b98f770fa9d12237581cfce99e4f
Parents: 3d4dc43
Author: Lee moon soo <moon@apache.org>
Authored: Tue Jan 12 15:43:54 2016 -0800
Committer: Lee moon soo <moon@apache.org>
Committed: Tue Jan 12 16:12:10 2016 -0800

----------------------------------------------------------------------
 .travis.yml | 77 +++++++++++++++++++++++---------------------------------
 1 file changed, 31 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/0c42f433/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 74f1805..2619ed1 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -14,61 +14,46 @@
 # limitations under the License.
 
 language: java
-jdk:
-  - oraclejdk7
+
+matrix:
+  include:
+    # Test all modules
+    - jdk: "oraclejdk7"
+      env: SPARK_VER="1.6.0" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark
-Pscalding" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr"
+
+    # Test spark module for 1.5.2
+    - jdk: "oraclejdk7"
+      env: SPARK_VER="1.5.2" HADOOP_VER="2.3" PROFILE="-Pspark-1.5 -Phadoop-2.3 -Ppyspark"
BUILD_FLAG="package -DskipTests" TEST_FLAG="verify"
+
+    # Test spark module for 1.4.1
+    - jdk: "oraclejdk7"
+      env: SPARK_VER="1.4.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.4 -Phadoop-2.3 -Ppyspark"
BUILD_FLAG="package -DskipTests" TEST_FLAG="verify"
+
+    # Test spark module for 1.3.1
+    - jdk: "oraclejdk7"
+      env: SPARK_VER="1.3.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.3 -Phadoop-2.3 -Ppyspark"
BUILD_FLAG="package -DskipTests" TEST_FLAG="verify"
+
+    # Test spark module for 1.2.1
+    - jdk: "oraclejdk7"
+      env: SPARK_VER="1.2.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.2 -Phadoop-2.3 -Ppyspark"
BUILD_FLAG="package -DskipTests" TEST_FLAG="verify"
+
+    # Test spark module for 1.1.1
+    - jdk: "oraclejdk7"
+      env: SPARK_VER="1.1.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.1 -Phadoop-2.3 -Ppyspark"
BUILD_FLAG="package -DskipTests" TEST_FLAG="verify"
 
 before_install:
   - "export DISPLAY=:99.0"
   - "sh -e /etc/init.d/xvfb start"
 
 install:
-  - mvn package -DskipTests -Pspark-1.6 -Phadoop-2.3 -Ppyspark -Pscalding -B
+  - mvn $BUILD_FLAG $PROFILE -B
 
 before_script:
-  -
+  - ./testing/startSparkCluster.sh $SPARK_VER $HADOOP_VER
+  - echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh
 
 script:
- # spark 1.6
-  - mvn package -Pbuild-distr -Pspark-1.6 -Phadoop-2.3 -Ppyspark -Pscalding -B
-  - ./testing/startSparkCluster.sh 1.6.0 2.3
-  - echo "export SPARK_HOME=`pwd`/spark-1.6.0-bin-hadoop2.3" > conf/zeppelin-env.sh
-  - mvn verify -Pusing-packaged-distr -Pspark-1.6 -Phadoop-2.3 -Ppyspark -Pscalding -B
-  - ./testing/stopSparkCluster.sh 1.6.0 2.3
- # spark 1.5
-  - rm -rf `pwd`/interpreter/spark
-  - mvn package -DskipTests -Pspark-1.5 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
-  - ./testing/startSparkCluster.sh 1.5.2 2.3
-  - echo "export SPARK_HOME=`pwd`/spark-1.5.2-bin-hadoop2.3" > conf/zeppelin-env.sh
-  - mvn package -Pspark-1.5 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
-  - ./testing/stopSparkCluster.sh 1.5.2 2.3
- # spark 1.4
-  - rm -rf `pwd`/interpreter/spark
-  - mvn package -DskipTests -Pspark-1.4 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
-  - ./testing/startSparkCluster.sh 1.4.1 2.3
-  - echo "export SPARK_HOME=`pwd`/spark-1.4.1-bin-hadoop2.3" > conf/zeppelin-env.sh
-  - mvn package -Pspark-1.4 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
-  - ./testing/stopSparkCluster.sh 1.4.1 2.3  
- # spark 1.3
-  - rm -rf `pwd`/interpreter/spark
-  - mvn package -DskipTests -Pspark-1.3 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
-  - ./testing/startSparkCluster.sh 1.3.1 2.3
-  - echo "export SPARK_HOME=`pwd`/spark-1.3.1-bin-hadoop2.3" > conf/zeppelin-env.sh
-  - mvn package -Pspark-1.3 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
-  - ./testing/stopSparkCluster.sh 1.3.1 2.3
- # spark 1.2
-  - rm -rf `pwd`/interpreter/spark
-  - mvn package -Pspark-1.2 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
-  - ./testing/startSparkCluster.sh 1.2.1 2.3
-  - echo "export SPARK_HOME=`pwd`/spark-1.2.1-bin-hadoop2.3" > conf/zeppelin-env.sh
-  - mvn package -Pspark-1.2 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
-  - ./testing/stopSparkCluster.sh 1.2.1 2.3
-# spark 1.1
-  - rm -rf `pwd`/interpreter/spark
-  - mvn package -Pspark-1.1 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
-  - ./testing/startSparkCluster.sh 1.1.1 2.3
-  - echo "export SPARK_HOME=`pwd`/spark-1.1.1-bin-hadoop2.3" > conf/zeppelin-env.sh
-  - mvn package -Pspark-1.1 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
-  - ./testing/stopSparkCluster.sh 1.1.1 2.3
+  - mvn $TEST_FLAG $PROFILE -B
 
 after_failure:
   - cat target/rat.txt
@@ -77,7 +62,7 @@ after_failure:
   - cat zeppelin-distribution/target/zeppelin-*-SNAPSHOT/zeppelin-*-SNAPSHOT/logs/zeppelin*.out
 
 after_script:
-  -
+  - ./testing/stopSparkCluster.sh $SPARK_VER $HADOOP_VER
 
 notifications:
   slack:

