phoenix-commits mailing list archives

From: Apache Jenkins Server <jenk...@builds.apache.org>
Subject: Build failed in Jenkins: Phoenix | Master #1529
Date: Tue, 03 Jan 2017 16:15:18 GMT
See <https://builds.apache.org/job/Phoenix-master/1529/changes>

Changes:

[jmahonin] PHOENIX-3333 Support Spark 2.0

------------------------------------------
[...truncated 18357 lines...]
  at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:591)
  at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:509)
  at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:483)
  at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:561)
  at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:557)
  at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.doSetup(BaseHBaseManagedTimeIT.java:57)
  at org.apache.phoenix.spark.PhoenixSparkITHelper$.doSetup(AbstractPhoenixSparkIT.scala:33)
  at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:88)
  at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
  at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
  ...
  Cause: java.io.IOException: Failed to save in any storage directories while saving namespace.
  at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1176)
  at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1133)
  at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:163)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:991)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:342)
  at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:176)
  at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:973)
  at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:811)
  at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:742)
  at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:585)
  ...
990  [ScalaTest-3] ERROR org.apache.hadoop.hdfs.server.namenode.FSImage  - Failed to load image from FSImageFile(file=<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs/name1/current/fsimage_0000000000000000000,> cpktTxId=0000000000000000000)
java.io.IOException: No MD5 file found corresponding to image file <https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs/name1/current/fsimage_0000000000000000000>
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:940)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImageFile(FSImage.java:740)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:676)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFSImage(FSNamesystem.java:976)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFromDisk(FSNamesystem.java:681)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:584)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:644)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:811)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:795)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1488)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1111)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:982)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:811)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:742)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:585)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:982)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:863)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:845)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:832)
	at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:588)
	at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:509)
	at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:483)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:561)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:557)
	at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.doSetup(BaseHBaseManagedTimeIT.java:57)
	at org.apache.phoenix.spark.PhoenixSparkITHelper$.doSetup(AbstractPhoenixSparkIT.scala:33)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:88)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
1100 [ScalaTest-3] ERROR org.apache.hadoop.hdfs.server.namenode.FSImage  - Failed to load image from FSImageFile(file=<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs/name2/current/fsimage_0000000000000000000,> cpktTxId=0000000000000000000)
java.io.IOException: No MD5 file found corresponding to image file <https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs/name2/current/fsimage_0000000000000000000>
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:940)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImageFile(FSImage.java:740)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:676)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFSImage(FSNamesystem.java:976)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFromDisk(FSNamesystem.java:681)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:584)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:644)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:811)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:795)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1488)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1111)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:982)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:811)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:742)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:585)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:982)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:863)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:845)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:832)
	at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:588)
	at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:509)
	at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:483)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:561)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:557)
	at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.doSetup(BaseHBaseManagedTimeIT.java:57)
	at org.apache.phoenix.spark.PhoenixSparkITHelper$.doSetup(AbstractPhoenixSparkIT.scala:33)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:88)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
1211 [ScalaTest-3] ERROR org.apache.hadoop.hdfs.MiniDFSCluster  - IOE creating namenodes. Permissions dump:
path '<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs/data'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs/data>
	permissions: ----
path '<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c/dfs>
	permissions: drwx
path '<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30/dfscluster_66b19c4e-2cbb-45c4-a0d8-d9963cf2183c>
	permissions: drwx
path '<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data/19943bfa-228f-4b39-a46c-ba6a400acb30>
	permissions: drwx
path '<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target/test-data>
	permissions: drwx
path '<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark/target>
	permissions: drwx
path '<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/phoenix-spark>
	permissions: drwx
path '<https://builds.apache.org/job/Phoenix-master/ws/'>: 
	absolute:<https://builds.apache.org/job/Phoenix-master/ws/>
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace': 
	absolute:/home/jenkins/jenkins-slave/workspace
	permissions: drwx
path '/home/jenkins/jenkins-slave': 
	absolute:/home/jenkins/jenkins-slave
	permissions: drwx
path '/home/jenkins': 
	absolute:/home/jenkins
	permissions: drwx
path '/home': 
	absolute:/home
	permissions: dr-x
path '/': 
	absolute:/
	permissions: dr-x

java.io.IOException: Failed to load an FSImage file!
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:687)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFSImage(FSNamesystem.java:976)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFromDisk(FSNamesystem.java:681)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:584)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:644)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:811)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:795)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1488)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1111)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:982)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:811)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:742)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniDFSCluster(HBaseTestingUtility.java:585)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:982)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:863)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:845)
	at org.apache.hadoop.hbase.HBaseTestingUtility.startMiniCluster(HBaseTestingUtility.java:832)
	at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:588)
	at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:509)
	at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:483)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:561)
	at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:557)
	at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.doSetup(BaseHBaseManagedTimeIT.java:57)
	at org.apache.phoenix.spark.PhoenixSparkITHelper$.doSetup(AbstractPhoenixSparkIT.scala:33)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:88)
	at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
	at org.apache.phoenix.spark.AbstractPhoenixSparkIT.run(AbstractPhoenixSparkIT.scala:44)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Exception encountered when invoking run on a nested suite - java.io.IOException: Failed to load an FSImage file! *** ABORTED ***
  java.lang.RuntimeException: java.io.IOException: Failed to load an FSImage file!
  at org.apache.phoenix.query.BaseTest.initMiniCluster(BaseTest.java:591)
  at org.apache.phoenix.query.BaseTest.setUpTestCluster(BaseTest.java:509)
  at org.apache.phoenix.query.BaseTest.checkClusterInitialized(BaseTest.java:483)
  at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:561)
  at org.apache.phoenix.query.BaseTest.setUpTestDriver(BaseTest.java:557)
  at org.apache.phoenix.end2end.BaseHBaseManagedTimeIT.doSetup(BaseHBaseManagedTimeIT.java:57)
  at org.apache.phoenix.spark.PhoenixSparkITHelper$.doSetup(AbstractPhoenixSparkIT.scala:33)
  at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:88)
  at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
  at org.apache.phoenix.spark.AbstractPhoenixSparkIT.beforeAll(AbstractPhoenixSparkIT.scala:44)
  ...
  Cause: java.io.IOException: Failed to load an FSImage file!
  at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:687)
  at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:294)
  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFSImage(FSNamesystem.java:976)
  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFromDisk(FSNamesystem.java:681)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:584)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:644)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:811)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:795)
  at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1488)
  at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1111)
  ...
Run completed in 3 seconds, 806 milliseconds.
Total number of tests run: 0
Suites: completed 2, aborted 2
Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
*** 2 SUITES ABORTED ***
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Phoenix ..................................... SUCCESS [  2.328 s]
[INFO] Phoenix Core ....................................... SUCCESS [51:28 min]
[INFO] Phoenix - Flume .................................... SUCCESS [01:23 min]
[INFO] Phoenix - Pig ...................................... SUCCESS [03:49 min]
[INFO] Phoenix Query Server Client ........................ SUCCESS [ 13.804 s]
[INFO] Phoenix Query Server ............................... SUCCESS [01:45 min]
[INFO] Phoenix - Pherf .................................... SUCCESS [01:41 min]
[INFO] Phoenix - Spark .................................... FAILURE [ 55.383 s]
[INFO] Phoenix - Hive ..................................... SKIPPED
[INFO] Phoenix Client ..................................... SKIPPED
[INFO] Phoenix Server ..................................... SKIPPED
[INFO] Phoenix Assembly ................................... SKIPPED
[INFO] Phoenix - Tracing Web Application .................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:01 h
[INFO] Finished at: 2017-01-03T16:09:20+00:00
[INFO] Final Memory: 106M/1208M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (integration-test) on project phoenix-spark: There are test failures -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :phoenix-spark
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Compressed 1.57 GB of artifacts by 64.4% relative to #1528
Updating PHOENIX-3333
Recording test results
