You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2016/07/22 00:49:22 UTC

Build failed in Jenkins: beam_PostCommit_PythonVerify #135

See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/135/changes>

Changes:

[robertwb] Minor cdef value changes.

------------------------------------------
[...truncated 3140 lines...]
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "SingletonCoder$<string of 344 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s10"
        }, 
        "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s12", 
      "properties": {
        "non_parallel_inputs": {
          "s11": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s11"
          }, 
          "s9": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s9"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "WindowedValueCoder$<string of 736 bytes>", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                  "component_encodings": []
                }, 
                {
                  "@type": "SingletonCoder$<string of 344 bytes>", 
                  "component_encodings": []
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/finalize_write.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "<string of 1496 bytes>", 
        "user_name": "write/WriteImpl/finalize_write"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
INFO:root:Create job: <Job
 id: u'2016-07-21_17_45_32-15704347142994509983'
 projectId: u'apache-beam-testing'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:root:Created job with id: [2016-07-21_17_45_32-15704347142994509983]
INFO:root:To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2016-07-21_17_45_32-15704347142994509983
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010842a: 2016-07-22T00:45:33.098Z: JOB_MESSAGE_DETAILED: (da22568e39a75de): Checking required Cloud APIs are enabled.
INFO:root:Job 2016-07-21_17_45_32-15704347142994509983 is in state JOB_STATE_RUNNING
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108847: 2016-07-22T00:45:34.151Z: JOB_MESSAGE_DEBUG: (da22568e39a72bf): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108849: 2016-07-22T00:45:34.153Z: JOB_MESSAGE_DETAILED: (da22568e39a7af1): Expanding GroupByKey operations into optimizable parts.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010884c: 2016-07-22T00:45:34.156Z: JOB_MESSAGE_DETAILED: (da22568e39a7323): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108850: 2016-07-22T00:45:34.160Z: JOB_MESSAGE_DETAILED: (da22568e39a7387): Annotating graph with Autotuner information.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010886d: 2016-07-22T00:45:34.189Z: JOB_MESSAGE_DETAILED: (da22568e39a744f): Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108870: 2016-07-22T00:45:34.192Z: JOB_MESSAGE_DETAILED: (da22568e39a7c81): Fusing consumer split into read
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108872: 2016-07-22T00:45:34.194Z: JOB_MESSAGE_DETAILED: (da22568e39a74b3): Fusing consumer group/Reify into pair_with_one
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108874: 2016-07-22T00:45:34.196Z: JOB_MESSAGE_DETAILED: (da22568e39a7ce5): Fusing consumer format into count
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108876: 2016-07-22T00:45:34.198Z: JOB_MESSAGE_DETAILED: (da22568e39a7517): Fusing consumer count into group/GroupByWindow
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010887b: 2016-07-22T00:45:34.203Z: JOB_MESSAGE_DETAILED: (da22568e39a757b): Fusing consumer pair_with_one into split
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010887d: 2016-07-22T00:45:34.205Z: JOB_MESSAGE_DETAILED: (da22568e39a7dad): Fusing consumer group/GroupByWindow into group/Read
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108880: 2016-07-22T00:45:34.208Z: JOB_MESSAGE_DETAILED: (da22568e39a75df): Fusing consumer write/WriteImpl/write_bundles into format
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108883: 2016-07-22T00:45:34.211Z: JOB_MESSAGE_DETAILED: (da22568e39a7e11): Fusing consumer group/Write into group/Reify
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088b6: 2016-07-22T00:45:34.262Z: JOB_MESSAGE_DEBUG: (da22568e39a725d): Workflow config is missing a default resource spec.
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088b8: 2016-07-22T00:45:34.264Z: JOB_MESSAGE_DETAILED: (da22568e39a7a8f): Adding StepResource setup and teardown to workflow graph.
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088dc: 2016-07-22T00:45:34.300Z: JOB_MESSAGE_DEBUG: (d5c81a223fc34377): Adding workflow start and stop steps.
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088e7: 2016-07-22T00:45:34.311Z: JOB_MESSAGE_DEBUG: (7667f0b2f9f0e688): Assigning stage ids.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108947: 2016-07-22T00:45:34.407Z: JOB_MESSAGE_DEBUG: (c7b25d2b46bbb633): Executing wait step start2
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108950: 2016-07-22T00:45:34.416Z: JOB_MESSAGE_DEBUG: (e35571d571f641c5): Executing operation write/WriteImpl/DoOnce
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108953: 2016-07-22T00:45:34.419Z: JOB_MESSAGE_BASIC: S02: (9cd1197ee1aab0da): Executing operation group/Create
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010895c: 2016-07-22T00:45:34.428Z: JOB_MESSAGE_DEBUG: (d5c81a223fc34453): Value "write/WriteImpl/DoOnce.out" materialized.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108966: 2016-07-22T00:45:34.438Z: JOB_MESSAGE_BASIC: S01: (6e24cb682a587192): Executing operation write/WriteImpl/initialize_write
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a1f: 2016-07-22T00:45:34.623Z: JOB_MESSAGE_DEBUG: (8e74804a765e239f): Starting worker pool setup.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a22: 2016-07-22T00:45:34.626Z: JOB_MESSAGE_BASIC: (8e74804a765e27d5): Starting 1 workers...
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a32: 2016-07-22T00:45:34.642Z: JOB_MESSAGE_DEBUG: (e35571d571f6462f): Value "group/Session" materialized.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a3c: 2016-07-22T00:45:34.652Z: JOB_MESSAGE_BASIC: S03: (592622882b970b48): Executing operation read+split+pair_with_one+group/Reify+group/Write
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610114bda: 2016-07-22T00:46:24.218Z: JOB_MESSAGE_DETAILED: (ece1e6d99886c79e): Workers have started successfully.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012a09a: 2016-07-22T00:47:51.450Z: JOB_MESSAGE_ERROR: (89b961e010ca0d5e): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012a4cb: 2016-07-22T00:47:52.523Z: JOB_MESSAGE_ERROR: (6fae018b96d726fe): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012a917: 2016-07-22T00:47:53.623Z: JOB_MESSAGE_ERROR: (812f831deabb914b): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b13c: 2016-07-22T00:47:55.708Z: JOB_MESSAGE_ERROR: (7dee846727b71bd2): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b57a: 2016-07-22T00:47:56.794Z: JOB_MESSAGE_ERROR: (22a33cb749f05a09): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b9bb: 2016-07-22T00:47:57.883Z: JOB_MESSAGE_ERROR: (dd4fa0308c8ab691): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b9e4: 2016-07-22T00:47:57.924Z: JOB_MESSAGE_DEBUG: (6251030b36fbb057): Executing failure step failure1
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b9e7: 2016-07-22T00:47:57.927Z: JOB_MESSAGE_ERROR: (6251030b36fbb701): Workflow failed. Causes: (592622882b970f63): S03:read+split+pair_with_one+group/Reify+group/Write failed.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012ba22: 2016-07-22T00:47:57.986Z: JOB_MESSAGE_DETAILED: (6e8d0c8e21a631f4): Cleaning up.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012baa2: 2016-07-22T00:47:58.114Z: JOB_MESSAGE_DEBUG: (6e8d0c8e21a63dd7): Starting worker pool teardown.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012baa5: 2016-07-22T00:47:58.117Z: JOB_MESSAGE_BASIC: (6e8d0c8e21a63b19): Stopping worker pool...
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012bdf3: 2016-07-22T00:47:58.963Z: JOB_MESSAGE_DETAILED: (c2d3412301a1c28c): Failed to publish the result of the work update. Causes: (c2d3412301a1cc53): Failed to update work status. Causes: (14133f7d32eb9074): Work "1446569712273821742" not leased (or the lease was lost).
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561013f32c: 2016-07-22T00:49:18.124Z: JOB_MESSAGE_BASIC: (6e8d0c8e21a636fc): Worker pool stopped.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561013f78e: 2016-07-22T00:49:19.246Z: JOB_MESSAGE_DEBUG: (6e8d0c8e21a6326b): Tearing down pending resources...
INFO:root:Job 2016-07-21_17_45_32-15704347142994509983 is in state JOB_STATE_FAILED
Traceback (most recent call last):
  File "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py>", line 102, in <module>
    run()
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py>", line 93, in run
    result = p.run()
  File "apache_beam/pipeline.py", line 159, in run
    return self.runner.run(self)
  File "apache_beam/runners/dataflow_runner.py", line 188, in run
    % getattr(self, 'last_error_msg', None), self.result)
apache_beam.runners.dataflow_runner.DataflowRuntimeException: Dataflow pipeline failed:
(dd4fa0308c8ab691): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'


# Grep will exit with status 1 if success message was not found.
echo ">>> CHECKING JOB SUCCESS"
>>> CHECKING JOB SUCCESS
grep JOB_STATE_DONE job_output
Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : beam_PostCommit_PythonVerify #138

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/138/>


Build failed in Jenkins: beam_PostCommit_PythonVerify #137

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/137/>

------------------------------------------
[...truncated 4769 lines...]
              "component_encodings": [
                {
                  "@type": "WindowedValueCoder$<string of 736 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "SingletonCoder$<string of 344 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s10"
        }, 
        "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s12", 
      "properties": {
        "non_parallel_inputs": {
          "s11": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s11"
          }, 
          "s9": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s9"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "WindowedValueCoder$<string of 736 bytes>", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                  "component_encodings": []
                }, 
                {
                  "@type": "SingletonCoder$<string of 344 bytes>", 
                  "component_encodings": []
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/finalize_write.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "<string of 1496 bytes>", 
        "user_name": "write/WriteImpl/finalize_write"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
INFO:root:Create job: <Job
 id: u'2016-07-21_20_02_13-3133041382314963714'
 projectId: u'apache-beam-testing'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:root:Created job with id: [2016-07-21_20_02_13-3133041382314963714]
INFO:root:To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2016-07-21_20_02_13-3133041382314963714
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108da96f: 2016-07-22T03:02:14.639Z: JOB_MESSAGE_DETAILED: (c02161c42f99aba6): Checking required Cloud APIs are enabled.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108daaf0: 2016-07-22T03:02:15.024Z: JOB_MESSAGE_DEBUG: (c02161c42f99a261): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108daaf3: 2016-07-22T03:02:15.027Z: JOB_MESSAGE_DETAILED: (c02161c42f99a5a7): Expanding GroupByKey operations into optimizable parts.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108daaf5: 2016-07-22T03:02:15.029Z: JOB_MESSAGE_DETAILED: (c02161c42f99a8ed): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108daaf9: 2016-07-22T03:02:15.033Z: JOB_MESSAGE_DETAILED: (c02161c42f99af79): Annotating graph with Autotuner information.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab21: 2016-07-22T03:02:15.073Z: JOB_MESSAGE_DETAILED: (c02161c42f99ac91): Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab24: 2016-07-22T03:02:15.076Z: JOB_MESSAGE_DETAILED: (c02161c42f99afd7): Fusing consumer split into read
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab26: 2016-07-22T03:02:15.078Z: JOB_MESSAGE_DETAILED: (c02161c42f99a31d): Fusing consumer group/Reify into pair_with_one
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab28: 2016-07-22T03:02:15.080Z: JOB_MESSAGE_DETAILED: (c02161c42f99a663): Fusing consumer format into count
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab2a: 2016-07-22T03:02:15.082Z: JOB_MESSAGE_DETAILED: (c02161c42f99a9a9): Fusing consumer count into group/GroupByWindow
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab2e: 2016-07-22T03:02:15.086Z: JOB_MESSAGE_DETAILED: (c02161c42f99a035): Fusing consumer pair_with_one into split
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab30: 2016-07-22T03:02:15.088Z: JOB_MESSAGE_DETAILED: (c02161c42f99a37b): Fusing consumer group/GroupByWindow into group/Read
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab33: 2016-07-22T03:02:15.091Z: JOB_MESSAGE_DETAILED: (c02161c42f99a6c1): Fusing consumer write/WriteImpl/write_bundles into format
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab35: 2016-07-22T03:02:15.093Z: JOB_MESSAGE_DETAILED: (c02161c42f99aa07): Fusing consumer group/Write into group/Reify
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab66: 2016-07-22T03:02:15.142Z: JOB_MESSAGE_DEBUG: (c02161c42f99a20b): Workflow config is missing a default resource spec.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab69: 2016-07-22T03:02:15.145Z: JOB_MESSAGE_DETAILED: (c02161c42f99a551): Adding StepResource setup and teardown to workflow graph.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab79: 2016-07-22T03:02:15.161Z: JOB_MESSAGE_DEBUG: (59d1a513c0f9c1a3): Adding workflow start and stop steps.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dab9b: 2016-07-22T03:02:15.195Z: JOB_MESSAGE_DEBUG: (a3f10a499a937d9f): Assigning stage ids.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dac05: 2016-07-22T03:02:15.301Z: JOB_MESSAGE_DEBUG: (28fbc1c0b2b07924): Executing wait step start2
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dac0f: 2016-07-22T03:02:15.311Z: JOB_MESSAGE_DEBUG: (db48a060908b731e): Executing operation write/WriteImpl/DoOnce
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dac1a: 2016-07-22T03:02:15.322Z: JOB_MESSAGE_DEBUG: (b07e073caf0ab67): Value "write/WriteImpl/DoOnce.out" materialized.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dac23: 2016-07-22T03:02:15.331Z: JOB_MESSAGE_BASIC: S01: (e347efeed0f80dac): Executing operation write/WriteImpl/initialize_write
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dac32: 2016-07-22T03:02:15.346Z: JOB_MESSAGE_BASIC: S02: (7856896a0a1d8124): Executing operation group/Create
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dacef: 2016-07-22T03:02:15.535Z: JOB_MESSAGE_DEBUG: (9645994919ea2837): Starting worker pool setup.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dacf1: 2016-07-22T03:02:15.537Z: JOB_MESSAGE_BASIC: (9645994919ea2eb1): Starting 1 workers...
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dad03: 2016-07-22T03:02:15.555Z: JOB_MESSAGE_DEBUG: (bf0640589021b47d): Value "group/Session" materialized.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108dad0e: 2016-07-22T03:02:15.566Z: JOB_MESSAGE_BASIC: S03: (9cf8454cbee6c8f9): Executing operation read+split+pair_with_one+group/Reify+group/Write
INFO:root:Job 2016-07-21_20_02_13-3133041382314963714 is in state JOB_STATE_RUNNING
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108eb94d: 2016-07-22T03:03:24.237Z: JOB_MESSAGE_DETAILED: (1e5caf59cfe26f89): Workers have started successfully.
INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108ffa29: 2016-07-22T03:04:46.377Z: JOB_MESSAGE_ERROR: (4da4331f624d48ff): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156108ffe67: 2016-07-22T03:04:47.463Z: JOB_MESSAGE_ERROR: (66ab22bb44e62e01): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156109002a8: 2016-07-22T03:04:48.552Z: JOB_MESSAGE_ERROR: (7968d4466242bce7): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_20_02_13-3133041382314963714_00000156109006ee: 2016-07-22T03:04:49.646Z: JOB_MESSAGE_ERROR: (439e184989371787): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610900b23: 2016-07-22T03:04:50.723Z: JOB_MESSAGE_ERROR: (f0fde8255b0ef27e): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610900b4d: 2016-07-22T03:04:50.765Z: JOB_MESSAGE_DEBUG: (a3f10a499a937337): Executing failure step failure1
INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610900b4f: 2016-07-22T03:04:50.767Z: JOB_MESSAGE_ERROR: (a3f10a499a937c9d): Workflow failed. Causes: (9cf8454cbee6cb12): S03:read+split+pair_with_one+group/Reify+group/Write failed.
INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610900b87: 2016-07-22T03:04:50.823Z: JOB_MESSAGE_DETAILED: (8ec6e51026b9b1d7): Cleaning up.
INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610900c0a: 2016-07-22T03:04:50.954Z: JOB_MESSAGE_DEBUG: (8ec6e51026b9b0f0): Starting worker pool teardown.
INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610900c0d: 2016-07-22T03:04:50.957Z: JOB_MESSAGE_BASIC: (8ec6e51026b9b056): Stopping worker pool...
INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610900f81: 2016-07-22T03:04:51.841Z: JOB_MESSAGE_DETAILED: (65e1ede4a6fd0cc2): Failed to publish the result of the work update. Causes: (65e1ede4a6fd0787): Failed to update work status. Causes: (66454ad10fe77ae2): Work "7369378627037131084" not leased (or the lease was lost).
INFO:root:2016-07-21_20_02_13-3133041382314963714_0000015610914495: 2016-07-22T03:06:10.965Z: JOB_MESSAGE_BASIC: (8ec6e51026b9bf6f): Worker pool stopped.
INFO:root:2016-07-21_20_02_13-3133041382314963714_000001561091490b: 2016-07-22T03:06:12.107Z: JOB_MESSAGE_DEBUG: (8ec6e51026b9bb86): Tearing down pending resources...
INFO:root:Job 2016-07-21_20_02_13-3133041382314963714 is in state JOB_STATE_FAILED
Traceback (most recent call last):
  File "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py",> line 102, in <module>
    run()
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py",> line 93, in run
    result = p.run()
  File "apache_beam/pipeline.py", line 159, in run
    return self.runner.run(self)
  File "apache_beam/runners/dataflow_runner.py", line 188, in run
    % getattr(self, 'last_error_msg', None), self.result)
apache_beam.runners.dataflow_runner.DataflowRuntimeException: Dataflow pipeline failed:
(f0fde8255b0ef27e): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'


# Grep will exit with status 1 if success message was not found.
echo ">>> CHECKING JOB SUCCESS"
>>> CHECKING JOB SUCCESS
grep JOB_STATE_DONE job_output
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PostCommit_PythonVerify #136

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/136/changes>

Changes:

[robertwb] Add tests for WindowedValue.

------------------------------------------
[...truncated 3961 lines...]
              "component_encodings": [
                {
                  "@type": "WindowedValueCoder$<string of 736 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "SingletonCoder$<string of 344 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s10"
        }, 
        "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s12", 
      "properties": {
        "non_parallel_inputs": {
          "s11": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s11"
          }, 
          "s9": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s9"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "WindowedValueCoder$<string of 736 bytes>", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                  "component_encodings": []
                }, 
                {
                  "@type": "SingletonCoder$<string of 344 bytes>", 
                  "component_encodings": []
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/finalize_write.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "<string of 1496 bytes>", 
        "user_name": "write/WriteImpl/finalize_write"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
INFO:root:Create job: <Job
 id: u'2016-07-21_17_51_37-13201584638255343868'
 projectId: u'apache-beam-testing'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:root:Created job with id: [2016-07-21_17_51_37-13201584638255343868]
INFO:root:To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2016-07-21_17_51_37-13201584638255343868
INFO:root:2016-07-21_17_51_37-13201584638255343868_00000156101618ca: 2016-07-22T00:51:38.826Z: JOB_MESSAGE_DETAILED: (4f3def46854313b4): Checking required Cloud APIs are enabled.
INFO:root:Job 2016-07-21_17_51_37-13201584638255343868 is in state JOB_STATE_RUNNING
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a26: 2016-07-22T00:51:39.174Z: JOB_MESSAGE_DEBUG: (4f3def46854317f9): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a29: 2016-07-22T00:51:39.177Z: JOB_MESSAGE_DETAILED: (4f3def4685431ab3): Expanding GroupByKey operations into optimizable parts.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a2c: 2016-07-22T00:51:39.180Z: JOB_MESSAGE_DETAILED: (4f3def4685431d6d): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a31: 2016-07-22T00:51:39.185Z: JOB_MESSAGE_DETAILED: (4f3def46854312e1): Annotating graph with Autotuner information.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a6c: 2016-07-22T00:51:39.244Z: JOB_MESSAGE_DETAILED: (4f3def4685431dc9): Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a6f: 2016-07-22T00:51:39.247Z: JOB_MESSAGE_DETAILED: (4f3def4685431083): Fusing consumer split into read
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a73: 2016-07-22T00:51:39.251Z: JOB_MESSAGE_DETAILED: (4f3def468543133d): Fusing consumer group/Reify into pair_with_one
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a75: 2016-07-22T00:51:39.253Z: JOB_MESSAGE_DETAILED: (4f3def46854315f7): Fusing consumer format into count
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a77: 2016-07-22T00:51:39.255Z: JOB_MESSAGE_DETAILED: (4f3def46854318b1): Fusing consumer count into group/GroupByWindow
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a7d: 2016-07-22T00:51:39.261Z: JOB_MESSAGE_DETAILED: (4f3def4685431e25): Fusing consumer pair_with_one into split
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a7f: 2016-07-22T00:51:39.263Z: JOB_MESSAGE_DETAILED: (4f3def46854310df): Fusing consumer group/GroupByWindow into group/Read
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a82: 2016-07-22T00:51:39.266Z: JOB_MESSAGE_DETAILED: (4f3def4685431399): Fusing consumer write/WriteImpl/write_bundles into format
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161a85: 2016-07-22T00:51:39.269Z: JOB_MESSAGE_DETAILED: (4f3def4685431653): Fusing consumer group/Write into group/Reify
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161ac2: 2016-07-22T00:51:39.330Z: JOB_MESSAGE_DEBUG: (4f3def468543124f): Workflow config is missing a default resource spec.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161ac6: 2016-07-22T00:51:39.334Z: JOB_MESSAGE_DETAILED: (4f3def4685431509): Adding StepResource setup and teardown to workflow graph.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161ae9: 2016-07-22T00:51:39.369Z: JOB_MESSAGE_DEBUG: (b991d539699b3dad): Adding workflow start and stop steps.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161b1b: 2016-07-22T00:51:39.419Z: JOB_MESSAGE_DEBUG: (1138960ce236da4): Assigning stage ids.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161b8c: 2016-07-22T00:51:39.532Z: JOB_MESSAGE_DEBUG: (4f3def46854313da): Executing wait step start2
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161b9a: 2016-07-22T00:51:39.546Z: JOB_MESSAGE_BASIC: S02: (aac97674c28c4c7f): Executing operation group/Create
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161bb1: 2016-07-22T00:51:39.569Z: JOB_MESSAGE_DEBUG: (dc6a842c89d6f8fd): Executing operation write/WriteImpl/DoOnce
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161bc3: 2016-07-22T00:51:39.587Z: JOB_MESSAGE_DEBUG: (48fa59a3a6e0b9ef): Value "write/WriteImpl/DoOnce.out" materialized.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161bd0: 2016-07-22T00:51:39.600Z: JOB_MESSAGE_BASIC: S01: (b991d539699b3004): Executing operation write/WriteImpl/initialize_write
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161c66: 2016-07-22T00:51:39.750Z: JOB_MESSAGE_DEBUG: (b7e867830e35ddfb): Starting worker pool setup.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161c69: 2016-07-22T00:51:39.753Z: JOB_MESSAGE_BASIC: (b7e867830e35d465): Starting 1 workers...
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161c81: 2016-07-22T00:51:39.777Z: JOB_MESSAGE_DEBUG: (a993b3962c2158b6): Value "group/Session" materialized.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610161c8f: 2016-07-22T00:51:39.791Z: JOB_MESSAGE_BASIC: S03: (4f3def4685431c08): Executing operation read+split+pair_with_one+group/Reify+group/Write
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610178da7: 2016-07-22T00:53:14.279Z: JOB_MESSAGE_DETAILED: (40d5c1bf39052c56): Workers have started successfully.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610184aaf: 2016-07-22T00:54:02.671Z: JOB_MESSAGE_ERROR: (f9aaa5909718f9a8): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610184ee9: 2016-07-22T00:54:03.753Z: JOB_MESSAGE_ERROR: (d6ec76c2e7617e0e): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610185353: 2016-07-22T00:54:04.883Z: JOB_MESSAGE_ERROR: (7966faa632df5b4b): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_51_37-13201584638255343868_00000156101857ea: 2016-07-22T00:54:06.058Z: JOB_MESSAGE_ERROR: (47fa3864172b6bee): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610185c21: 2016-07-22T00:54:07.137Z: JOB_MESSAGE_ERROR: (3219b09f27891943): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'

INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610185c7a: 2016-07-22T00:54:07.226Z: JOB_MESSAGE_DEBUG: (4c14a458e3ae44bc): Executing failure step failure1
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610185c7d: 2016-07-22T00:54:07.229Z: JOB_MESSAGE_ERROR: (4c14a458e3ae4a06): Workflow failed. Causes: (4f3def46854315c1): S03:read+split+pair_with_one+group/Reify+group/Write failed.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610185cba: 2016-07-22T00:54:07.290Z: JOB_MESSAGE_DETAILED: (8c71a48a1f91b1f7): Cleaning up.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610185d0d: 2016-07-22T00:54:07.373Z: JOB_MESSAGE_DEBUG: (8c71a48a1f91be40): Starting worker pool teardown.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610185d10: 2016-07-22T00:54:07.376Z: JOB_MESSAGE_BASIC: (8c71a48a1f91bbc6): Stopping worker pool...
INFO:root:2016-07-21_17_51_37-13201584638255343868_000001561018604f: 2016-07-22T00:54:08.207Z: JOB_MESSAGE_DETAILED: (3fd3f1a5f1e66cbd): Failed to publish the result of the work update. Causes: (3fd3f1a5f1e664b8): Failed to update work status. Causes: (f8d716acac58cc73): Work "8707453335425830429" not leased (or the lease was lost).
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610199598: 2016-07-22T00:55:27.384Z: JOB_MESSAGE_BASIC: (8c71a48a1f91b80f): Worker pool stopped.
INFO:root:2016-07-21_17_51_37-13201584638255343868_0000015610199a1e: 2016-07-22T00:55:28.542Z: JOB_MESSAGE_DEBUG: (8c71a48a1f91b302): Tearing down pending resources...
INFO:root:Job 2016-07-21_17_51_37-13201584638255343868 is in state JOB_STATE_FAILED
Traceback (most recent call last):
  File "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py",> line 102, in <module>
    run()
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py",> line 93, in run
    result = p.run()
  File "apache_beam/pipeline.py", line 159, in run
    return self.runner.run(self)
  File "apache_beam/runners/dataflow_runner.py", line 188, in run
    % getattr(self, 'last_error_msg', None), self.result)
apache_beam.runners.dataflow_runner.DataflowRuntimeException: Dataflow pipeline failed:
(3219b09f27891943): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 462, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 891, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24041)
    op.start()
  File "dataflow_worker/executor.py", line 477, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
    def start(self):
  File "dataflow_worker/executor.py", line 508, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
    self.dofn_runner.start()
  File "apache_beam/runners/common.py", line 92, in apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
    self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'


# Grep will exit with status 1 if success message was not found.
echo ">>> CHECKING JOB SUCCESS"
>>> CHECKING JOB SUCCESS
grep JOB_STATE_DONE job_output
Build step 'Execute shell' marked build as failure