Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2016/09/23 15:04:36 UTC

Build failed in Jenkins: beam_PostCommit_PythonVerify #441

See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/441/>

------------------------------------------
[...truncated 2648 lines...]
                  "@type": "SingletonCoder$<string of 344 bytes>", 
                  "component_encodings": []
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/GroupByKey.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s12"
        }, 
        "serialized_fn": "<string of 356 bytes>", 
        "user_name": "write/WriteImpl/GroupByKey"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s14", 
      "properties": {
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "WindowedValueCoder$<string of 736 bytes>", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                  "component_encodings": []
                }, 
                {
                  "@type": "SingletonCoder$<string of 344 bytes>", 
                  "component_encodings": []
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/FlatMap(<lambda at iobase.py:1037>).out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s13"
        }, 
        "serialized_fn": "<string of 1316 bytes>", 
        "user_name": "write/WriteImpl/FlatMap(<lambda at iobase.py:1037>)"
      }
    }, 
    {
      "kind": "CollectionToSingleton", 
      "name": "s15", 
      "properties": {
        "output_info": [
          {
            "encoding": {
              "@type": "WindowedValueCoder$<string of 736 bytes>", 
              "component_encodings": [
                {
                  "@type": "WindowedValueCoder$<string of 736 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "SingletonCoder$<string of 344 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|FlatMap(<lambda at iobase.py:1037>).None)/CreatePCollectionView.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s14"
        }, 
        "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|FlatMap(<lambda at iobase.py:1037>).None)/CreatePCollectionView"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s16", 
      "properties": {
        "non_parallel_inputs": {
          "s15": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s15"
          }, 
          "s9": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "s9"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "WindowedValueCoder$<string of 736 bytes>", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$<string of 172 bytes>", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==", 
                  "component_encodings": []
                }, 
                {
                  "@type": "SingletonCoder$<string of 344 bytes>", 
                  "component_encodings": []
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/WriteImpl/finalize_write.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "<string of 2028 bytes>", 
        "user_name": "write/WriteImpl/finalize_write"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
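
The JSON above is the tail of the job graph that the Python SDK serializes and submits to the Cloud Dataflow service; each step (ParallelDo, CollectionToSingleton, ...) corresponds to a transform in the wordcount pipeline, and the write/WriteImpl/* steps are the expansion of the final text sink. A minimal sketch of a pipeline with the same step names follows; the bucket paths and pipeline options are illustrative assumptions (and the module paths are those of current apache_beam releases), not values taken from this build.

    # Sketch only: input/output paths and options below are hypothetical,
    # and the imports follow the current apache_beam layout rather than
    # the 2016-era SDK used by this job.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions([
        '--runner=DataflowRunner',
        '--project=apache-beam-testing',
        '--temp_location=gs://example-bucket/tmp',  # hypothetical bucket
    ])

    with beam.Pipeline(options=options) as p:
        (p
         | 'read' >> beam.io.ReadFromText('gs://example-bucket/input.txt')  # hypothetical input
         | 'split' >> beam.FlatMap(lambda line: line.split())
         | 'pair_with_one' >> beam.Map(lambda word: (word, 1))
         | 'group' >> beam.GroupByKey()
         | 'count' >> beam.Map(lambda kv: (kv[0], sum(kv[1])))
         | 'format' >> beam.Map(lambda kv: '%s: %d' % kv)
         | 'write' >> beam.io.WriteToText('gs://example-bucket/output'))  # expands into the write/WriteImpl/* steps above
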
INFO:root:Create job: <Job
 id: u'2016-09-23_08_03_52-11018877666233022789'
 projectId: u'apache-beam-testing'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:root:Created job with id: [2016-09-23_08_03_52-11018877666233022789]
INFO:root:To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2016-09-23_08_03_52-11018877666233022789
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fc5a: 2016-09-23T15:03:53.178Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7d26): Checking required Cloud APIs are enabled.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fd7a: 2016-09-23T15:03:53.466Z: JOB_MESSAGE_DEBUG: (80dd9717f3bb7373): Combiner lifting skipped for step write/WriteImpl/GroupByKey: GroupByKey not followed by a combiner.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fd7d: 2016-09-23T15:03:53.469Z: JOB_MESSAGE_DEBUG: (80dd9717f3bb7dbd): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fd7f: 2016-09-23T15:03:53.471Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7807): Expanding GroupByKey operations into optimizable parts.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fd82: 2016-09-23T15:03:53.474Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7251): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fd88: 2016-09-23T15:03:53.480Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb712f): Annotating graph with Autotuner information.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fde9: 2016-09-23T15:03:53.577Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7a57): Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdec: 2016-09-23T15:03:53.580Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb74a1): Fusing consumer split into read
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdef: 2016-09-23T15:03:53.583Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7eeb): Fusing consumer group/Reify into pair_with_one
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdf1: 2016-09-23T15:03:53.585Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7935): Fusing consumer format into count
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdf3: 2016-09-23T15:03:53.587Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb737f): Fusing consumer write/WriteImpl/GroupByKey/GroupByWindow into write/WriteImpl/GroupByKey/Read
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdf5: 2016-09-23T15:03:53.589Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7dc9): Fusing consumer write/WriteImpl/GroupByKey/Write into write/WriteImpl/GroupByKey/Reify
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdfa: 2016-09-23T15:03:53.594Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb725d): Fusing consumer write/WriteImpl/FlatMap(<lambda at iobase.py:1037>) into write/WriteImpl/GroupByKey/GroupByWindow
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdfd: 2016-09-23T15:03:53.597Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7ca7): Fusing consumer count into group/GroupByWindow
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fdff: 2016-09-23T15:03:53.599Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb76f1): Fusing consumer write/WriteImpl/WindowInto into write/WriteImpl/Map(<lambda at iobase.py:1034>)
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe02: 2016-09-23T15:03:53.602Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb713b): Fusing consumer write/WriteImpl/GroupByKey/Reify into write/WriteImpl/WindowInto
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe05: 2016-09-23T15:03:53.605Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7b85): Fusing consumer write/WriteImpl/Map(<lambda at iobase.py:1034>) into write/WriteImpl/write_bundles
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe07: 2016-09-23T15:03:53.607Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb75cf): Fusing consumer pair_with_one into split
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe09: 2016-09-23T15:03:53.609Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7019): Fusing consumer group/GroupByWindow into group/Read
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe0b: 2016-09-23T15:03:53.611Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7a63): Fusing consumer write/WriteImpl/write_bundles into format
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe0d: 2016-09-23T15:03:53.613Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb74ad): Fusing consumer group/Write into group/Reify
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe53: 2016-09-23T15:03:53.683Z: JOB_MESSAGE_DEBUG: (80dd9717f3bb7ded): Workflow config is missing a default resource spec.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe56: 2016-09-23T15:03:53.686Z: JOB_MESSAGE_DETAILED: (80dd9717f3bb7837): Adding StepResource setup and teardown to workflow graph.
INFO:root:Job 2016-09-23_08_03_52-11018877666233022789 is in state JOB_STATE_RUNNING
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe6e: 2016-09-23T15:03:53.710Z: JOB_MESSAGE_DEBUG: (c12aec0da48fa2e9): Adding workflow start and stop steps.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fe92: 2016-09-23T15:03:53.746Z: JOB_MESSAGE_DEBUG: (f41173a56e3c68bd): Assigning stage ids.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fec8: 2016-09-23T15:03:53.800Z: JOB_MESSAGE_DEBUG: (f41173a56e3c6f2e): Executing wait step start2
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fed0: 2016-09-23T15:03:53.808Z: JOB_MESSAGE_DEBUG: (56874678b42f8700): Executing operation write/WriteImpl/DoOnce
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fedd: 2016-09-23T15:03:53.821Z: JOB_MESSAGE_DEBUG: (769be5d4a374285): Value "write/WriteImpl/DoOnce.out" materialized.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792fee6: 2016-09-23T15:03:53.830Z: JOB_MESSAGE_BASIC: S04: (d973a82534f11491): Executing operation write/WriteImpl/initialize_write
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792ff9c: 2016-09-23T15:03:54.012Z: JOB_MESSAGE_DEBUG: (3b5b1704debdc55b): Starting worker pool setup.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792ff9e: 2016-09-23T15:03:54.014Z: JOB_MESSAGE_BASIC: (3b5b1704debdccc9): Starting 1 workers...
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792ffae: 2016-09-23T15:03:54.030Z: JOB_MESSAGE_DEBUG: (1fa3e7b43e727bc4): Value "group/Session" materialized.
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575792ffb8: 2016-09-23T15:03:54.040Z: JOB_MESSAGE_BASIC: S02: (f41173a56e3c6e24): Executing operation read+split+pair_with_one+group/Reify+group/Write
INFO:root:2016-09-23_08_03_52-11018877666233022789_0000015757939409: 2016-09-23T15:04:32.009Z: JOB_MESSAGE_ERROR: (9024a3e8295e174e): Workflow failed. Causes: (ca8195cda7b60d44): Unable to bring up enough workers: minimum 1, actual 0.
INFO:root:2016-09-23_08_03_52-11018877666233022789_0000015757939442: 2016-09-23T15:04:32.066Z: JOB_MESSAGE_DETAILED: (239b3680fae15c74): Cleaning up.
INFO:root:2016-09-23_08_03_52-11018877666233022789_0000015757939453: 2016-09-23T15:04:32.083Z: JOB_MESSAGE_DEBUG: (239b3680fae15c01): Starting worker pool teardown.
INFO:root:2016-09-23_08_03_52-11018877666233022789_0000015757939454: 2016-09-23T15:04:32.084Z: JOB_MESSAGE_BASIC: (239b3680fae1565f): Stopping worker pool...
INFO:root:2016-09-23_08_03_52-11018877666233022789_000001575793984f: 2016-09-23T15:04:33.103Z: JOB_MESSAGE_DEBUG: (239b3680fae15579): Tearing down pending resources...
INFO:root:Job 2016-09-23_08_03_52-11018877666233022789 is in state JOB_STATE_FAILED
Traceback (most recent call last):
  File "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py",> line 107, in <module>
    run()
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py",> line 98, in run
    result = p.run()
  File "apache_beam/pipeline.py", line 159, in run
    return self.runner.run(self)
  File "apache_beam/runners/dataflow_runner.py", line 188, in run
    % getattr(self, 'last_error_msg', None), self.result)
apache_beam.runners.dataflow_runner.DataflowRuntimeException: Dataflow pipeline failed:
(9024a3e8295e174e): Workflow failed. Causes: (ca8195cda7b60d44): Unable to bring up enough workers: minimum 1, actual 0.

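The failure itself is on the service side: the job reached JOB_STATE_RUNNING, but the worker pool never came up ("Unable to bring up enough workers: minimum 1, actual 0"), so the runner raised DataflowRuntimeException from p.run(). In current SDKs the same terminal state can be inspected on the PipelineResult instead of relying on the exception; a minimal sketch (method and enum names from present apache_beam releases, which may differ from the 2016 SDK in this log):

    # Sketch: check the terminal job state explicitly.  'p' is a beam.Pipeline
    # constructed as in the sketch above, but without the with-block, so that
    # run() is called explicitly here.
    from apache_beam.runners.runner import PipelineState

    result = p.run()               # submits the job; returns a PipelineResult
    result.wait_until_finish()     # blocks until a terminal state is reached
    if result.state != PipelineState.DONE:
        raise RuntimeError('Dataflow job ended in state %s' % result.state)
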
# Grep will exit with status 1 if the success message was not found.
echo ">>> CHECKING JOB SUCCESS"
>>> CHECKING JOB SUCCESS
grep JOB_STATE_DONE job_output
Build step 'Execute shell' marked build as failure
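
For reference, the success check that turned this job failure into a build failure is the grep above: the Jenkins shell step captures the pipeline's console log into job_output and requires JOB_STATE_DONE to appear in it. A minimal sketch of that kind of check (the file name job_output comes from the log; everything else is illustrative):

    # Sketch of the post-run check; any non-zero exit here marks the
    # 'Execute shell' build step, and therefore the build, as failed.
    echo ">>> CHECKING JOB SUCCESS"
    if ! grep -q JOB_STATE_DONE job_output; then
        echo "Pipeline never reached JOB_STATE_DONE" >&2
        exit 1
    fi
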

Jenkins build is back to normal : beam_PostCommit_PythonVerify #442

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/442/changes>