beam-commits mailing list archives

From: Apache Jenkins Server <jenk...@builds.apache.org>
Subject: Build failed in Jenkins: beam_PostCommit_Python_Verify #858
Date: Sun, 11 Dec 2016 04:26:42 GMT
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/858/>

------------------------------------------
[...truncated 2731 lines...]
                        {
                          "@type": "PickleCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwBmcnZOanOIA5XIYNmYyFjbSFTkh4A9DYR/g==",
                          "component_encodings": []
                        }, 
                        {
                          "@type": "PickleCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwBmcnZOanOIA5XIYNmYyFjbSFTkh4A9DYR/g==",
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "TimestampCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwhmbmpxSWJuQXOID5XIYNmYyFjbSFTkh4ANWETWg==",
                      "component_encodings": []
                    }, 
                    {
                      "@type": "PickleCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwBmcnZOanOIA5XIYNmYyFjbSFTkh4A9DYR/g==",
                      "component_encodings": [
                        {
                          "@type": "PickleCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwBmcnZOanOIA5XIYNmYyFjbSFTkh4A9DYR/g==",
                          "component_encodings": []
                        }, 
                        {
                          "@type": "PickleCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwBmcnZOanOIA5XIYNmYyFjbSFTkh4A9DYR/g==",
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "ViewAsSingleton(side.None)/CreatePCollectionView.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s2"
        }, 
        "user_name": "ViewAsSingleton(side.None)/CreatePCollectionView"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s4", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "<lambda>"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {
          "s3": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s3"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "WindowedValueCoder$<string of 408 bytes>", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "TimestampCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwhmbmpxSWJuQXOID5XIYNmYyFjbSFTkh4ANWETWg==",
                  "component_encodings": []
                }, 
                {
                  "@type": "SingletonCoder$<string of 256 bytes>", 
                  "component_encodings": []
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "compute.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s1"
        }, 
        "serialized_fn": "<string of 1116 bytes>", 
        "user_name": "compute"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
 id: u'2016-12-10_19_47_53-5199068634394198370'
 projectId: u'apache-beam-testing'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2016-12-10_19_47_53-5199068634394198370]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2016-12-10_19_47_53-5199068634394198370
root: INFO: Job 2016-12-10_19_47_53-5199068634394198370 is in state JOB_STATE_RUNNING
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe7e4f: 2016-12-11T03:47:54.063Z:
JOB_MESSAGE_DETAILED: (835808770f91568f): Checking required Cloud APIs are enabled.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe7fef: 2016-12-11T03:47:54.479Z:
JOB_MESSAGE_DETAILED: (835808770f9155a0): Expanding GroupByKey operations into optimizable
parts.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe7ff1: 2016-12-11T03:47:54.481Z:
JOB_MESSAGE_DETAILED: (835808770f915732): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe7ff4: 2016-12-11T03:47:54.484Z:
JOB_MESSAGE_DETAILED: (835808770f9158c4): Annotating graph with Autotuner information.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe7ffc: 2016-12-11T03:47:54.492Z:
JOB_MESSAGE_DETAILED: (835808770f915d7a): Fusing adjacent ParDo, Read, Write, and Flatten
operations
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe800b: 2016-12-11T03:47:54.507Z:
JOB_MESSAGE_DEBUG: (835808770f915878): Workflow config is missing a default resource spec.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe800d: 2016-12-11T03:47:54.509Z:
JOB_MESSAGE_DETAILED: (835808770f915a0a): Adding StepResource setup and teardown to workflow
graph.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe8029: 2016-12-11T03:47:54.537Z:
JOB_MESSAGE_DEBUG: (4090975b38efaeae): Adding workflow start and stop steps.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe805b: 2016-12-11T03:47:54.587Z:
JOB_MESSAGE_DEBUG: (8a25028ef780cac5): Assigning stage ids.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe80c5: 2016-12-11T03:47:54.693Z:
JOB_MESSAGE_DEBUG: (285c502f0cd471b5): Executing wait step start2
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe80cf: 2016-12-11T03:47:54.703Z:
JOB_MESSAGE_DEBUG: (835808770f915d2e): Executing operation start
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe80da: 2016-12-11T03:47:54.714Z:
JOB_MESSAGE_DEBUG: (835808770f9151e4): Value "start.out" materialized.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe80f1: 2016-12-11T03:47:54.737Z:
JOB_MESSAGE_DEBUG: (d430baaaba086c4f): Executing operation side
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe80fc: 2016-12-11T03:47:54.748Z:
JOB_MESSAGE_DEBUG: (f1afe39c76c21d7): Value "side.out" materialized.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe8106: 2016-12-11T03:47:54.758Z:
JOB_MESSAGE_BASIC: S01: (8a25028ef780ce70): Executing operation ViewAsSingleton(side.None)/CreatePCollectionView
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe8112: 2016-12-11T03:47:54.770Z:
JOB_MESSAGE_DEBUG: (d6c0e30328f5f856): Value "ViewAsSingleton(side.None)/CreatePCollectionView.out"
materialized.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe811b: 2016-12-11T03:47:54.779Z:
JOB_MESSAGE_BASIC: S02: (835808770f91569a): Executing operation compute
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe81e6: 2016-12-11T03:47:54.982Z:
JOB_MESSAGE_DEBUG: (34caaedc8bbcecd0): Starting worker pool setup.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebfe81e8: 2016-12-11T03:47:54.984Z:
JOB_MESSAGE_BASIC: (34caaedc8bbcedbe): Starting 1 workers...
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ebff8c30: 2016-12-11T03:49:03.152Z:
JOB_MESSAGE_DETAILED: (40181ac805c140c1): Workers have started successfully.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010cd9: 2016-12-11T03:50:41.625Z:
JOB_MESSAGE_ERROR: (824dc359c2db16a): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 514,
in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 892, in dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24008)
    op.start()
  File "dataflow_worker/executor.py", line 456, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13870)
    def start(self):
  File "dataflow_worker/executor.py", line 483, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13685)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 89, in apache_beam.runners.common.DoFnRunner.__init__
(apache_beam/runners/common.c:3469)
    args, kwargs, [side_input[global_window]
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/transforms/sideinputs.py", line
192, in __getitem__
    _FilteringIterable(self._iterable, target_window), self._view_options)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/pvalue.py", line 279, in _from_runtime_iterable
    'PCollection with more than one element accessed as '
ValueError: PCollection with more than one element accessed as a singleton view.

root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010d40: 2016-12-11T03:50:41.728Z:
JOB_MESSAGE_ERROR: (824dc359c2db3fa): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 514,
in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 892, in dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24008)
    op.start()
  File "dataflow_worker/executor.py", line 456, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13870)
    def start(self):
  File "dataflow_worker/executor.py", line 483, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13685)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 89, in apache_beam.runners.common.DoFnRunner.__init__
(apache_beam/runners/common.c:3469)
    args, kwargs, [side_input[global_window]
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/transforms/sideinputs.py", line
192, in __getitem__
    _FilteringIterable(self._iterable, target_window), self._view_options)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/pvalue.py", line 279, in _from_runtime_iterable
    'PCollection with more than one element accessed as '
ValueError: PCollection with more than one element accessed as a singleton view.

root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010da5: 2016-12-11T03:50:41.829Z:
JOB_MESSAGE_ERROR: (824dc359c2db68a): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 514,
in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 892, in dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24008)
    op.start()
  File "dataflow_worker/executor.py", line 456, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13870)
    def start(self):
  File "dataflow_worker/executor.py", line 483, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13685)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 89, in apache_beam.runners.common.DoFnRunner.__init__
(apache_beam/runners/common.c:3469)
    args, kwargs, [side_input[global_window]
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/transforms/sideinputs.py", line
192, in __getitem__
    _FilteringIterable(self._iterable, target_window), self._view_options)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/pvalue.py", line 279, in _from_runtime_iterable
    'PCollection with more than one element accessed as '
ValueError: PCollection with more than one element accessed as a singleton view.

root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010e20: 2016-12-11T03:50:41.952Z:
JOB_MESSAGE_ERROR: (824dc359c2db91a): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 514,
in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 892, in dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24008)
    op.start()
  File "dataflow_worker/executor.py", line 456, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13870)
    def start(self):
  File "dataflow_worker/executor.py", line 483, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13685)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 89, in apache_beam.runners.common.DoFnRunner.__init__
(apache_beam/runners/common.c:3469)
    args, kwargs, [side_input[global_window]
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/transforms/sideinputs.py", line
192, in __getitem__
    _FilteringIterable(self._iterable, target_window), self._view_options)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/pvalue.py", line 279, in _from_runtime_iterable
    'PCollection with more than one element accessed as '
ValueError: PCollection with more than one element accessed as a singleton view.

root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010e84: 2016-12-11T03:50:42.052Z:
JOB_MESSAGE_ERROR: (824dc359c2dbbaa): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 514,
in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 892, in dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24008)
    op.start()
  File "dataflow_worker/executor.py", line 456, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13870)
    def start(self):
  File "dataflow_worker/executor.py", line 483, in dataflow_worker.executor.DoOperation.start
(dataflow_worker/executor.c:13685)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 89, in apache_beam.runners.common.DoFnRunner.__init__
(apache_beam/runners/common.c:3469)
    args, kwargs, [side_input[global_window]
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/transforms/sideinputs.py", line
192, in __getitem__
    _FilteringIterable(self._iterable, target_window), self._view_options)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/pvalue.py", line 279, in _from_runtime_iterable
    'PCollection with more than one element accessed as '
ValueError: PCollection with more than one element accessed as a singleton view.

root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010ea9: 2016-12-11T03:50:42.089Z:
JOB_MESSAGE_DEBUG: (f4416e5e8aad1c8a): Executing failure step failure1
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010eac: 2016-12-11T03:50:42.092Z:
JOB_MESSAGE_ERROR: (f4416e5e8aad1c78): Workflow failed. Causes: (835808770f9158cf): S02:compute
failed.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010edf: 2016-12-11T03:50:42.143Z:
JOB_MESSAGE_DETAILED: (4090975b38efa5c7): Cleaning up.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010f67: 2016-12-11T03:50:42.279Z:
JOB_MESSAGE_DEBUG: (4090975b38eface0): Starting worker pool teardown.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec010f69: 2016-12-11T03:50:42.281Z:
JOB_MESSAGE_BASIC: (4090975b38efac46): Stopping worker pool...
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec026f00: 2016-12-11T03:52:12.288Z:
JOB_MESSAGE_BASIC: (4090975b38efa35f): Worker pool stopped.
root: INFO: 2016-12-10_19_47_53-5199068634394198370_00000158ec026f13: 2016-12-11T03:52:12.307Z:
JOB_MESSAGE_DEBUG: (4090975b38efa9de): Tearing down pending resources...
root: INFO: Job 2016-12-10_19_47_53-5199068634394198370 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
Ran 21 tests in 5071.428s

FAILED (errors=1)
Build step 'Execute shell' marked build as failure
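Note on the failing step: the repeated ValueError "PCollection with more than one element accessed as a singleton view" is raised when a side input declared as a singleton is backed by a PCollection that holds more than one element at execution time. Below is a minimal, hypothetical sketch of that failure mode, assuming a current Beam Python SDK; the step names start/side/compute mirror the job graph above, but the element values and the lambda are illustrative assumptions, not the actual failing test.

import apache_beam as beam
from apache_beam.pvalue import AsSingleton

with beam.Pipeline() as p:
    main = p | 'start' >> beam.Create([1, 2, 3])
    # Two elements, so this PCollection cannot be read as a singleton.
    side = p | 'side' >> beam.Create([10, 20])

    # AsSingleton promises exactly one element; when a worker materializes
    # the view, it raises the ValueError seen in the tracebacks above.
    _ = main | 'compute' >> beam.Map(lambda x, s: x + s, s=AsSingleton(side))

The usual remedies are to guarantee that the side PCollection contains exactly one element, or to read it with AsIter or AsList when several elements are expected.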
