beam-commits mailing list archives

From: Apache Jenkins Server <jenk...@builds.apache.org>
Subject: Build failed in Jenkins: beam_PerformanceTests_ParquetIOIT_HDFS #55
Date: Sun, 24 Jun 2018 22:38:32 GMT
See <https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/55/display/redirect>

------------------------------------------
[...truncated 289.81 KB...]
    	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    	at com.sun.proxy.$Proxy64.create(Unknown Source)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    	at com.sun.proxy.$Proxy65.create(Unknown Source)
    	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1648)
    	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1689)
    	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1624)
    	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
    	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
    	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
    	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
    	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
    	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
    	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:249)
    	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:236)
    	at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:924)
    	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
    	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:146)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:219)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:69)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:517)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:505)
    	at org.apache.beam.sdk.io.parquet.ParquetIOIT$DeterministicallyConstructAvroRecordsFn.processElement(ParquetIOIT.java:128)
    	at org.apache.beam.sdk.io.parquet.ParquetIOIT$DeterministicallyConstructAvroRecordsFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:149)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:219)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:69)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:517)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:505)
    	at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:70)
    	at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:149)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:393)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:362)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:290)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)

    Jun 24, 2018 10:35:41 PM org.apache.beam.runners.dataflow.TestDataflowRunner$ErrorMonitorMessagesHandler process
    INFO: Dataflow job 2018-06-24_15_18_52-1852645479699940397 threw exception. Failure message was: org.apache.hadoop.net.ConnectTimeoutException: Call From parquetioit0writethenread-06241518-chbu-harness-qnzc.c.apache-beam-testing.internal/10.128.15.203 to 194.106.226.35.bc.googleusercontent.com:9000 failed on socket timeout exception: org.apache.hadoop.net.ConnectTimeoutException: 20000 millis timeout while waiting for channel to be ready for connect. ch : java.nio.channels.SocketChannel[connection-pending remote=194.106.226.35.bc.googleusercontent.com/35.226.106.194:9000]; For more details see: http://wiki.apache.org/hadoop/SocketTimeout
    	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    	at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:792)
    	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:751)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1479)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    	at com.sun.proxy.$Proxy64.create(Unknown Source)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    	at com.sun.proxy.$Proxy65.create(Unknown Source)
    	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1648)
    	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1689)
    	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1624)
    	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
    	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
    	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
    	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
    	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
    	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
    	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:249)
    	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:236)
    	at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:924)
    	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
    	Caused by: org.apache.hadoop.net.ConnectTimeoutException: 20000 millis timeout while waiting for channel to be ready for connect. ch : java.nio.channels.SocketChannel[connection-pending remote=194.106.226.35.bc.googleusercontent.com/35.226.106.194:9000]
    	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:534)
    	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
    	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
    	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
    	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
    	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    	at com.sun.proxy.$Proxy64.create(Unknown Source)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    	at com.sun.proxy.$Proxy65.create(Unknown Source)
    	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1648)
    	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1689)
    	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1624)
    	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
    	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
    	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
    	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
    	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
    	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
    	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
    	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:249)
    	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:236)
    	at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:924)
    	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
    	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:146)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:219)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:69)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:517)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:505)
    	at org.apache.beam.sdk.io.parquet.ParquetIOIT$DeterministicallyConstructAvroRecordsFn.processElement(ParquetIOIT.java:128)
    	at org.apache.beam.sdk.io.parquet.ParquetIOIT$DeterministicallyConstructAvroRecordsFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:149)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:219)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:69)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:517)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:505)
    	at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:70)
    	at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:149)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:393)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:362)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:290)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)

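The root cause above is Hadoop's IPC connect timeout: the Dataflow worker cannot reach the HDFS namenode on port 9000 within the default 20000 ms, so FileBasedSink never opens its temp files. Below is a minimal sketch of one way to make the write less sensitive to a slow-to-appear namenode, assuming the pipeline registers HDFS through Beam's standard HadoopFileSystemOptions; the namenode address and the larger timeout values are placeholders, not this job's actual configuration, and how this particular job wires its Configuration is not shown in the log.

    import java.util.Collections;

    import org.apache.beam.sdk.io.hdfs.HadoopFileSystemOptions;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.hadoop.conf.Configuration;

    public class HdfsConnectTimeoutSketch {
      public static HadoopFileSystemOptions hdfsOptions(String[] args) {
        Configuration conf = new Configuration();
        // Placeholder namenode address; the real address comes from the
        // Kubernetes HDFS cluster the test spins up and is not shown here.
        conf.set("fs.defaultFS", "hdfs://<namenode-host>:9000");
        // 20000 ms is Hadoop's default IPC connect timeout, the value seen in
        // the ConnectTimeoutException above; these larger values are illustrative.
        conf.setInt("ipc.client.connect.timeout", 60000);
        conf.setInt("ipc.client.connect.max.retries.on.timeouts", 10);

        HadoopFileSystemOptions options =
            PipelineOptionsFactory.fromArgs(args).withValidation().as(HadoopFileSystemOptions.class);
        options.setHdfsConfiguration(Collections.singletonList(conf));
        return options;
      }
    }

Raising ipc.client.connect.timeout only buys time; if the HDFS service never becomes reachable from the Dataflow workers, the create() call will still fail once the retries are exhausted.
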
    Jun 24, 2018 10:36:02 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2018-06-24T22:36:01.285Z: Autoscaling: Resizing worker pool from 42 to 48.
    Jun 24, 2018 10:36:07 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2018-06-24T22:36:06.800Z: Autoscaling: Raised the number of workers to 48 based on the rate of progress in the currently running step(s).
    Jun 24, 2018 10:36:31 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2018-06-24T22:36:31.214Z: Autoscaling: Resizing worker pool from 48 to 56.
    Jun 24, 2018 10:36:48 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2018-06-24T22:36:47.164Z: Autoscaling: Raised the number of workers to 56 based on the rate of progress in the currently running step(s).
    Jun 24, 2018 10:37:02 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2018-06-24T22:37:01.188Z: Autoscaling: Resizing worker pool from 56 to 65.
    Jun 24, 2018 10:37:35 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2018-06-24T22:37:32.390Z: Autoscaling: Resizing worker pool from 65 to 75.
    Jun 24, 2018 10:37:35 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    WARNING: 2018-06-24T22:37:33.770Z: Autoscaling: Unable to reach resize target in zone us-central1-a. QUOTA_EXCEEDED: Quota 'DISKS_TOTAL_GB' exceeded.  Limit: 45000.0 in region us-central1.
    Jun 24, 2018 10:38:06 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2018-06-24T22:38:00.468Z: Autoscaling: Resizing worker pool from 75 to 85.
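
Separately from the timeout, the WARNING above shows autoscaling stalling on the regional DISKS_TOTAL_GB quota (limit 45000 GB in us-central1), so the worker pool cannot reach its resize target. A hedged sketch, not this job's actual flags: bounding autoscaling and shrinking each worker's persistent disk through the standard Dataflow worker-pool options keeps total disk usage inside that quota; the specific values are illustrative.

    import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;

    public class QuotaFriendlyWorkerPoolSketch {
      public static DataflowPipelineOptions dataflowOptions(String[] args) {
        DataflowPipelineOptions options =
            PipelineOptionsFactory.fromArgs(args).withValidation().as(DataflowPipelineOptions.class);
        // Illustrative values: bound autoscaling and shrink the per-worker
        // persistent disk so (workers x diskSizeGb) stays inside the regional
        // DISKS_TOTAL_GB quota reported as exceeded above.
        options.setMaxNumWorkers(40);
        options.setDiskSizeGb(50);
        return options;
      }
    }

The same settings are available on the command line as --maxNumWorkers and --diskSizeGb.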

STDERR: 
2018-06-24 22:38:27,400 b98dbf5c MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 667, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 547, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-06-24 22:38:27,401 b98dbf5c MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-06-24 22:38:27,401 b98dbf5c MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/config-beam-performancetests-parquetioit-hdfs-55> delete -f <https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/LargeITCluster/hdfs-multi-datanode-cluster.yml> --ignore-not-found
2018-06-24 22:38:31,239 b98dbf5c MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 801, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 667, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 547, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-06-24 22:38:31,240 b98dbf5c MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-06-24 22:38:31,289 b98dbf5c MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-06-24 22:38:31,290 b98dbf5c MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/runs/b98dbf5c/pkb.log>
2018-06-24 22:38:31,290 b98dbf5c MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/runs/b98dbf5c/completion_statuses.json>
Build step 'Execute shell' marked build as failure
