carbondata-commits mailing list archives

From: Apache Jenkins Server <jenk...@builds.apache.org>
Subject: Build failed in Jenkins: carbondata-master-spark-2.2 #625
Date: Mon, 09 Jul 2018 08:13:16 GMT
See <https://builds.apache.org/job/carbondata-master-spark-2.2/625/display/redirect?page=changes>

Changes:

[kunalkapoor642] [CARBONDATA-2689] Added validations for complex columns in alter set

------------------------------------------
[...truncated 62.68 MB...]
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:82)
	at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
	at scala.collection.immutable.List.foldLeft(List.scala:84)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:82)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:74)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:74)
	at org.apache.spark.sql.hive.CarbonAnalyzer.execute(CarbonAnalyzer.scala:42)
	at org.apache.spark.sql.hive.CarbonAnalyzer.execute(CarbonAnalyzer.scala:27)
	at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:69)
	at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:67)
	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:50)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:67)
	at org.apache.spark.sql.SparkSession.table(SparkSession.scala:618)
	at org.apache.spark.sql.execution.command.DropTableCommand.run(ddl.scala:203)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:67)
	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:183)
	at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:107)
	at org.apache.spark.sql.CarbonSession$$anonfun$sql$1.apply(CarbonSession.scala:96)
	at org.apache.spark.sql.CarbonSession.withProfiler(CarbonSession.scala:154)
	at org.apache.spark.sql.CarbonSession.sql(CarbonSession.scala:94)
	at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
	at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:113)
	at org.apache.carbondata.spark.testsuite.standardpartition.StandardPartitionWithPreaggregateTestCase$$anonfun$30.apply$mcV$sp(StandardPartitionWithPreaggregateTestCase.scala:529)
	at org.apache.carbondata.spark.testsuite.standardpartition.StandardPartitionWithPreaggregateTestCase$$anonfun$30.apply(StandardPartitionWithPreaggregateTestCase.scala:528)
	at org.apache.carbondata.spark.testsuite.standardpartition.StandardPartitionWithPreaggregateTestCase$$anonfun$30.apply(StandardPartitionWithPreaggregateTestCase.scala:528)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.carbondata.spark.testsuite.standardpartition.StandardPartitionWithPreaggregateTestCase.org$scalatest$BeforeAndAfterAll$$super$run(StandardPartitionWithPreaggregateTestCase.scala:29)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.carbondata.spark.testsuite.standardpartition.StandardPartitionWithPreaggregateTestCase.run(StandardPartitionWithPreaggregateTestCase.scala:29)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
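
The frames above bottom out in StandardPartitionWithPreaggregateTestCase (anonymous test function 30, file lines 528-529), where a DROP TABLE issued through the shared sql(...) helper fails while Spark's analyzer resolves the table. A minimal sketch of that test shape follows; the suite name, test title and table name are placeholders, since the actual test body is not part of this log:

import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll

// Sketch only: reproduces the pattern implied by the trace
// (QueryTest.sql -> CarbonSession.sql -> DropTableCommand.run).
class PreaggregatePartitionDropSketch extends QueryTest with BeforeAndAfterAll {
  test("drop a partitioned table that carries a preaggregate datamap") {
    // DropTableCommand resolves the table through SparkSession.table first,
    // which is where the analyzer call chain shown above originates.
    sql("DROP TABLE IF EXISTS partition_preaggregate.some_partitioned_table")
  }
}
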
18/07/09 01:12:21 AUDIT CarbonCreateTableCommand: [asf933.gq1.ygridcore.net][jenkins][Thread-1]Creating Table with Database name [partition_preaggregate] and Table name [partitionallcompaction]
18/07/09 01:12:21 WARN HiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.CarbonSource. Persisting data source table `partition_preaggregate`.`partitionallcompaction` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
18/07/09 01:12:21 AUDIT CarbonCreateTableCommand: [asf933.gq1.ygridcore.net][jenkins][Thread-1]Table created with Database name [partition_preaggregate] and Table name [partitionallcompaction]
18/07/09 01:12:21 AUDIT CarbonCreateTableCommand: [asf933.gq1.ygridcore.net][jenkins][Thread-1]Creating Table with Database name [partition_preaggregate] and Table name [partitionallcompaction_sensor_1]
18/07/09 01:12:21 WARN HiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.CarbonSource. Persisting data source table `partition_preaggregate`.`partitionallcompaction_sensor_1` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
18/07/09 01:12:21 AUDIT CarbonCreateTableCommand: [asf933.gq1.ygridcore.net][jenkins][Thread-1]Table created with Database name [partition_preaggregate] and Table name [partitionallcompaction_sensor_1]
18/07/09 01:12:21 AUDIT CarbonCreateDataMapCommand: [asf933.gq1.ygridcore.net][jenkins][Thread-1]DataMap sensor_1 successfully added
18/07/09 01:12:22 ERROR GlobalSortHelper$: Executor task launch worker for task 70425 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:22 ERROR GlobalSortHelper$: Executor task launch worker for task 70426 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:23 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.input.segments.partition_preaggregate.partitionallcompaction with value 0#0_1531123941911.segment added in the session param
18/07/09 01:12:23 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key validate.carbon.input.segments.partition_preaggregate.partitionallcompaction with value false added in the session param
18/07/09 01:12:23 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.query.directQueryOnDataMap.enabled with value true added in the session param
18/07/09 01:12:29 ERROR FileFactory: CarbonRecordWriter:partitionallcompaction_sensor_1  Failed to create directory path /home/jenkins/jenkins-slave/workspace/carbondata-master-spark-2.2/integration/spark-common/target/warehouse/partition_preaggregate.db/partitionallcompaction_sensor_1/partitionallcompaction_deptname=network
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3231
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3130
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3141
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3141
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3141
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3130
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3141
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3141
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3141
18/07/09 01:12:29 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:29 WARN log: Updated size to 3130
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5689
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5678
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5632
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5654
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5665
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5711
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5779
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5700
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5654
18/07/09 01:12:30 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:30 WARN log: Updated size to 5643
18/07/09 01:12:30 ERROR GlobalSortHelper$: Executor task launch worker for task 71051 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:31 ERROR GlobalSortHelper$: Executor task launch worker for task 71052 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:31 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.input.segments.partition_preaggregate.partitionallcompaction with value 1#1_1531123950665.segment added in the session param
18/07/09 01:12:31 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key validate.carbon.input.segments.partition_preaggregate.partitionallcompaction with value false added in the session param
18/07/09 01:12:31 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.query.directQueryOnDataMap.enabled with value true added in the session param
18/07/09 01:12:39 ERROR GlobalSortHelper$: Executor task launch worker for task 71677 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:39 ERROR GlobalSortHelper$: Executor task launch worker for task 71678 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:40 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.input.segments.partition_preaggregate.partitionallcompaction with value 2#2_1531123959189.segment added in the session param
18/07/09 01:12:40 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key validate.carbon.input.segments.partition_preaggregate.partitionallcompaction with value false added in the session param
18/07/09 01:12:40 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.query.directQueryOnDataMap.enabled with value true added in the session param
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction_sensor_1
18/07/09 01:12:47 WARN log: Updated size to 3141
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5700
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5711
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5700
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5665
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5689
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5689
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5665
18/07/09 01:12:47 WARN log: Updating partition stats fast for: partitionallcompaction
18/07/09 01:12:47 WARN log: Updated size to 5654
18/07/09 01:12:48 ERROR GlobalSortHelper$: Executor task launch worker for task 72303 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:48 ERROR GlobalSortHelper$: Executor task launch worker for task 72304 Data Load is partially success for table partitionallcompaction
18/07/09 01:12:49 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.input.segments.partition_preaggregate.partitionallcompaction with value 3#3_1531123968151.segment added in the session param
18/07/09 01:12:49 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key validate.carbon.input.segments.partition_preaggregate.partitionallcompaction with value false added in the session param
18/07/09 01:12:49 AUDIT CacheProvider: [asf933.gq1.ygridcore.net][jenkins][Thread-1]The key carbon.query.directQueryOnDataMap.enabled with value true added in the session param
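
For context, the AUDIT entries above from CarbonCreateTableCommand, CarbonCreateDataMapCommand and CacheProvider correspond to statements of roughly the following shape. This is a sketch only: just the database, table and datamap names and the session-parameter keys come from the log; the columns, the aggregate query and the segment id are illustrative assumptions.

import org.apache.spark.sql.SparkSession

object PreaggregateAuditSketch {
  def run(spark: SparkSession): Unit = {
    spark.sql("CREATE DATABASE IF NOT EXISTS partition_preaggregate")
    spark.sql("USE partition_preaggregate")
    // CarbonCreateTableCommand: "Creating Table ... [partitionallcompaction]"
    spark.sql(
      """CREATE TABLE partitionallcompaction (empname STRING, salary INT)
        |PARTITIONED BY (deptname STRING)
        |STORED BY 'carbondata'""".stripMargin)
    // CarbonCreateDataMapCommand: "DataMap sensor_1 successfully added";
    // CarbonData materializes it as the child table partitionallcompaction_sensor_1.
    spark.sql(
      """CREATE DATAMAP sensor_1 ON TABLE partitionallcompaction
        |USING 'preaggregate'
        |AS SELECT deptname, SUM(salary) FROM partitionallcompaction GROUP BY deptname""".stripMargin)
    // CacheProvider: "The key carbon.input.segments.<db>.<table> ... added in the
    // session param". The user-facing form takes segment ids; the values in the
    // log (e.g. 0#0_1531123941911.segment) are internal ids set while the
    // preaggregate child table is loaded, not something typed by the test.
    spark.sql("SET carbon.input.segments.partition_preaggregate.partitionallcompaction = 0")
  }
}
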
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
Not sending mail to unregistered user kunalkapoor642@gmail.com
Sending e-mails to: commits@carbondata.apache.org
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: Failed to parse POMs
java.io.IOException: Backing channel 'H33' is disconnected.
	at hudson.remoting.RemoteInvocationHandler.channelOrFail(RemoteInvocationHandler.java:214)
	at hudson.remoting.RemoteInvocationHandler.invoke(RemoteInvocationHandler.java:283)
	at com.sun.proxy.$Proxy152.isAlive(Unknown Source)
	at hudson.Launcher$RemoteLauncher$ProcImpl.isAlive(Launcher.java:1137)
	at hudson.maven.ProcessCache$MavenProcess.call(ProcessCache.java:166)
	at hudson.maven.MavenModuleSetBuild$MavenModuleSetBuildExecution.doRun(MavenModuleSetBuild.java:879)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:504)
	at hudson.model.Run.execute(Run.java:1794)
	at hudson.maven.MavenModuleSetBuild.run(MavenModuleSetBuild.java:543)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Caused by: java.io.IOException: Unexpected termination of the channel
	at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:77)
Caused by: java.io.EOFException
	at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2679)
	at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:3154)
	at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:862)
	at java.io.ObjectInputStream.<init>(ObjectInputStream.java:358)
	at hudson.remoting.ObjectInputStreamEx.<init>(ObjectInputStreamEx.java:48)
	at hudson.remoting.AbstractSynchronousByteArrayCommandTransport.read(AbstractSynchronousByteArrayCommandTransport.java:36)
	at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:63)
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
ERROR: H33 is offline; cannot locate JDK 1.8 (latest)
ERROR: H33 is offline; cannot locate Maven 3.3.9
Not sending mail to unregistered user kunalkapoor642@gmail.com
