Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/01/07 09:22:02 UTC

Build failed in Jenkins: beam_PostCommit_Python_Verify #3897

See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/3897/display/redirect>

------------------------------------------
[...truncated 1.14 MB...]
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s2"
        }, 
        "serialized_fn": "<string of 952 bytes>", 
        "user_name": "pair_with_one"
      }
    }, 
    {
      "kind": "GroupByKey", 
      "name": "s4", 
      "properties": {
        "display_data": [], 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:pair", 
                  "component_encodings": [
                    {
                      "@type": "StrUtf8Coder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlzBJUWhJWkWziAeVyGDZmMhY20hU5IeAAajEkY=", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "kind:stream", 
                      "component_encodings": [
                        {
                          "@type": "VarIntCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxhiUWeeSXOIA5XIYNmYyFjbSFTkh4A89cR+g==", 
                          "component_encodings": []
                        }
                      ], 
                      "is_stream_like": true
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "group.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s3"
        }, 
        "serialized_fn": "%0AJ%22H%0A%1Dref_Coder_GlobalWindowCoder_1%12%27%0A%25%0A%23%0A%21urn%3Abeam%3Acoders%3Aglobal_window%3A0.1jT%0A%25%0A%23%0A%21beam%3Awindowfn%3Aglobal_windows%3Av0.1%10%01%1A%1Dref_Coder_GlobalWindowCoder_1%22%02%3A%00%28%010%018%01H%01", 
        "user_name": "group"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s5", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "count_ones"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:pair", 
                  "component_encodings": [
                    {
                      "@type": "StrUtf8Coder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlzBJUWhJWkWziAeVyGDZmMhY20hU5IeAAajEkY=", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "count.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s4"
        }, 
        "serialized_fn": "<string of 1032 bytes>", 
        "user_name": "count"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s6", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "format_result"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "format.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s5"
        }, 
        "serialized_fn": "<string of 1028 bytes>", 
        "user_name": "format"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
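
The step graph above (read/Read -> split -> pair_with_one -> group -> count -> format) matches the SDK's wordcount-style test pipeline. A minimal sketch of a Beam Python pipeline that would produce this shape, assuming the standard wordcount example; the input path and helper names are illustrative, not the exact test code:

import re

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions


def count_ones(word_ones):
    # (word, [1, 1, ...]) -> (word, count)
    word, ones = word_ones
    return (word, sum(ones))


def format_result(word_count):
    word, count = word_count
    return '%s: %d' % (word, count)


options = PipelineOptions()  # would also carry the Dataflow runner/worker flags
with beam.Pipeline(options=options) as p:
    (p
     | 'read' >> beam.io.ReadFromText('gs://example-bucket/input.txt')  # illustrative path
     | 'split' >> beam.FlatMap(lambda line: re.findall(r"[A-Za-z']+", line))
     | 'pair_with_one' >> beam.Map(lambda word: (word, 1))
     | 'group' >> beam.GroupByKey()
     | 'count' >> beam.Map(count_ones)
     | 'format' >> beam.Map(format_result))
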
root: INFO: Create job: <Job
 createTime: u'2018-01-07T09:15:45.203969Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2018-01-07_01_15_44-4526533410717542241'
 location: u'us-central1'
 name: u'beamapp-jenkins-0107091543-700154'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2018-01-07_01_15_44-4526533410717542241]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-01-07_01_15_44-4526533410717542241?project=apache-beam-testing
root: INFO: Job 2018-01-07_01_15_44-4526533410717542241 is in state JOB_STATE_PENDING
root: INFO: 2018-01-07T09:15:44.691Z: JOB_MESSAGE_WARNING: (3ed17a2789c1a475): Setting the number of workers (1) disables autoscaling for this job. If you are trying to cap autoscaling, consider only setting max_num_workers. If you want to disable autoscaling altogether, the documented way is to explicitly use autoscalingAlgorithm=NONE.
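
The warning refers to the worker flags handed to the Dataflow runner: pinning num_workers disables autoscaling, capping is done with max_num_workers only, and the explicit off switch is autoscaling_algorithm=NONE. A short sketch of the corresponding Beam Python pipeline options (flag values are illustrative):

from apache_beam.options.pipeline_options import PipelineOptions

# As run here: a fixed pool of one worker, which turns autoscaling off.
pinned = PipelineOptions(['--num_workers=1'])

# To cap autoscaling instead, set only the ceiling.
capped = PipelineOptions(['--max_num_workers=5'])

# To disable autoscaling explicitly, as the warning recommends.
disabled = PipelineOptions(['--autoscaling_algorithm=NONE'])
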
root: INFO: 2018-01-07T09:15:47.072Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9764): Checking required Cloud APIs are enabled.
root: INFO: 2018-01-07T09:15:48.250Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9aa5): Expanding CollectionToSingleton operations into optimizable parts.
root: INFO: 2018-01-07T09:15:48.281Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b97b4): Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2018-01-07T09:15:48.304Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b94c3): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2018-01-07T09:15:48.329Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b92cd): Expanding GroupByKey operations into optimizable parts.
root: INFO: 2018-01-07T09:15:48.365Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b90d7): Annotating graph with Autotuner information.
root: INFO: 2018-01-07T09:15:48.409Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9ceb): Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2018-01-07T09:15:48.433Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9af5): Fusing consumer split into read/Read
root: INFO: 2018-01-07T09:15:48.459Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b98ff): Fusing consumer group/Reify into pair_with_one
root: INFO: 2018-01-07T09:15:48.483Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9709): Fusing consumer format into count
root: INFO: 2018-01-07T09:15:48.508Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9513): Fusing consumer count into group/GroupByWindow
root: INFO: 2018-01-07T09:15:48.523Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b931d): Fusing consumer pair_with_one into split
root: INFO: 2018-01-07T09:15:48.546Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9127): Fusing consumer group/Write into group/Reify
root: INFO: 2018-01-07T09:15:48.568Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9f31): Fusing consumer group/GroupByWindow into group/Read
root: INFO: 2018-01-07T09:15:48.594Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b9d3b): Workflow config is missing a default resource spec.
root: INFO: 2018-01-07T09:15:48.610Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b9b45): Adding StepResource setup and teardown to workflow graph.
root: INFO: 2018-01-07T09:15:48.623Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b994f): Adding workflow start and stop steps.
root: INFO: 2018-01-07T09:15:48.641Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b9759): Assigning stage ids.
root: INFO: 2018-01-07T09:15:48.759Z: JOB_MESSAGE_DEBUG: (5499febc92abcdf0): Executing wait step start13
root: INFO: 2018-01-07T09:15:48.819Z: JOB_MESSAGE_BASIC: (5499febc92abc0c3): Executing operation group/Create
root: INFO: 2018-01-07T09:15:48.863Z: JOB_MESSAGE_DEBUG: (1661fa3e558ebd28): Starting worker pool setup.
root: INFO: 2018-01-07T09:15:48.902Z: JOB_MESSAGE_BASIC: (1661fa3e558eb6ea): Starting 1 workers in us-central1-f...
root: INFO: 2018-01-07T09:15:48.962Z: JOB_MESSAGE_DEBUG: (c36d30ac2d82356a): Value "group/Session" materialized.
root: INFO: 2018-01-07T09:15:49.028Z: JOB_MESSAGE_BASIC: (c36d30ac2d8239a2): Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: Job 2018-01-07_01_15_44-4526533410717542241 is in state JOB_STATE_RUNNING
root: INFO: 2018-01-07T09:15:55.437Z: JOB_MESSAGE_DETAILED: (c69c34803c2c2fa2): Autoscaling: Raised the number of workers to 0 based on the rate of progress in the currently running step(s).
root: INFO: 2018-01-07T09:16:31.484Z: JOB_MESSAGE_ERROR: (c69c34803c2c2369): Startup of the worker pool in zone us-central1-f failed to bring up any of the desired 1 workers. QUOTA_EXCEEDED: Quota 'DISKS_TOTAL_GB' exceeded.  Limit: 21000.0 in region us-central1.
root: INFO: 2018-01-07T09:16:31.506Z: JOB_MESSAGE_ERROR: (c69c34803c2c2847): Workflow failed.
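
The failure is a Compute Engine quota issue rather than a pipeline error: the worker pool could not be created because DISKS_TOTAL_GB in us-central1 is at its 21000 GB limit. A hedged sketch of inspecting that regional quota with the Compute Engine API Python client (assumes Application Default Credentials; project and region taken from the log above):

from googleapiclient import discovery

compute = discovery.build('compute', 'v1')
region = compute.regions().get(project='apache-beam-testing',
                               region='us-central1').execute()
for quota in region['quotas']:
    if quota['metric'] == 'DISKS_TOTAL_GB':
        # Prints current usage against the regional limit, e.g. "... 21000.0 / 21000.0".
        print('%s: %s / %s' % (quota['metric'], quota['usage'], quota['limit']))
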
root: INFO: 2018-01-07T09:16:31.756Z: JOB_MESSAGE_DETAILED: (acca1d5fee8b9e86): Cleaning up.
root: INFO: 2018-01-07T09:16:31.794Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b9a9a): Starting worker pool teardown.
root: INFO: 2018-01-07T09:16:31.815Z: JOB_MESSAGE_BASIC: (acca1d5fee8b98a4): Stopping worker pool...
root: INFO: 2018-01-07T09:17:46.934Z: JOB_MESSAGE_BASIC: (acca1d5fee8b90cc): Worker pool stopped.
root: INFO: 2018-01-07T09:17:46.960Z: JOB_MESSAGE_DEBUG: (acca1d5fee8b9ce0): Tearing down pending resources...
root: INFO: Job 2018-01-07_01_15_44-4526533410717542241 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
Ran 3 tests in 378.090s

FAILED (errors=2)
Build step 'Execute shell' marked build as failure
Not sending mail to unregistered user ehudm@google.com

Build failed in Jenkins: beam_PostCommit_Python_Verify #3898

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/3898/display/redirect?page=changes>

Changes:

[iemejia] [BEAM-3187] Ensure that teardown is called in case of Exception on

[iemejia] [BEAM-3187] Enable PardoLifecycleTest for the Spark runner

------------------------------------------
[...truncated 1.19 MB...]
                    }, 
                    {
                      "@type": "kind:stream", 
                      "component_encodings": [
                        {
                          "@type": "VarIntCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxhiUWeeSXOIA5XIYNmYyFjbSFTkh4A89cR+g==", 
                          "component_encodings": []
                        }
                      ], 
                      "is_stream_like": true
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "monthly count/GroupByKey.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s2"
        }, 
        "serialized_fn": "%0AJ%22H%0A%1Dref_Coder_GlobalWindowCoder_1%12%27%0A%25%0A%23%0A%21urn%3Abeam%3Acoders%3Aglobal_window%3A0.1jT%0A%25%0A%23%0A%21beam%3Awindowfn%3Aglobal_windows%3Av0.1%10%01%1A%1Dref_Coder_GlobalWindowCoder_1%22%02%3A%00%28%010%018%01H%01", 
        "user_name": "monthly count/GroupByKey"
      }
    }, 
    {
      "kind": "CombineValues", 
      "name": "s4", 
      "properties": {
        "display_data": [], 
        "encoding": {
          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
          "component_encodings": [
            {
              "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
              "component_encodings": []
            }, 
            {
              "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
              "component_encodings": []
            }
          ], 
          "is_pair_like": true
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "monthly count/Combine.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s3"
        }, 
        "serialized_fn": "<string of 236 bytes>", 
        "user_name": "monthly count/Combine"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s5", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "<lambda>"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "format.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s4"
        }, 
        "serialized_fn": "<string of 1012 bytes>", 
        "user_name": "format"
      }
    }, 
    {
      "kind": "ParallelWrite", 
      "name": "s6", 
      "properties": {
        "create_disposition": "CREATE_IF_NEEDED", 
        "dataset": "BigQueryTornadoesIT", 
        "display_data": [], 
        "encoding": {
          "@type": "kind:windowed_value", 
          "component_encodings": [
            {
              "@type": "RowAsDictJsonCoder$eNprYEpOLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLarkCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwCu1BVY", 
              "component_encodings": []
            }, 
            {
              "@type": "kind:global_window"
            }
          ], 
          "is_wrapper": true
        }, 
        "format": "bigquery", 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s5"
        }, 
        "schema": "{\"fields\": [{\"type\": \"INTEGER\", \"name\": \"month\", \"mode\": \"NULLABLE\"}, {\"type\": \"INTEGER\", \"name\": \"tornado_count\", \"mode\": \"NULLABLE\"}]}", 
        "table": "monthly_tornadoes_1515324806232", 
        "user_name": "Write/WriteToBigQuery/NativeWrite", 
        "write_disposition": "WRITE_TRUNCATE"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
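
This second job is the BigQuery tornadoes integration test (read -> months with tornadoes -> monthly count -> format -> Write/WriteToBigQuery/NativeWrite). A minimal sketch of a Beam Python pipeline with this shape, assuming the standard bigquery_tornadoes example; the source table is an assumption, while the output dataset, schema, and dispositions follow the job properties above:

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions()
with beam.Pipeline(options=options) as p:
    (p
     | 'read' >> beam.io.Read(beam.io.BigQuerySource(
         'clouddataflow-readonly:samples.weather_stations'))  # assumed input table
     | 'months with tornadoes' >> beam.FlatMap(
         lambda row: [(int(row['month']), 1)] if row['tornado'] else [])
     | 'monthly count' >> beam.CombinePerKey(sum)
     | 'format' >> beam.Map(
         lambda k_v: {'month': k_v[0], 'tornado_count': k_v[1]})
     | 'Write' >> beam.io.WriteToBigQuery(
         'monthly_tornadoes',
         dataset='BigQueryTornadoesIT',
         schema='month:INTEGER, tornado_count:INTEGER',
         create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
         write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE))
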
root: INFO: Create job: <Job
 createTime: u'2018-01-07T11:33:28.014115Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2018-01-07_03_33_27-11099320247198650492'
 location: u'us-central1'
 name: u'beamapp-jenkins-0107113326-306148'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2018-01-07_03_33_27-11099320247198650492]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-01-07_03_33_27-11099320247198650492?project=apache-beam-testing
root: INFO: Job 2018-01-07_03_33_27-11099320247198650492 is in state JOB_STATE_PENDING
root: INFO: 2018-01-07T11:33:27.347Z: JOB_MESSAGE_WARNING: (9a08b4f546939b98): Setting the number of workers (1) disables autoscaling for this job. If you are trying to cap autoscaling, consider only setting max_num_workers. If you want to disable autoscaling altogether, the documented way is to explicitly use autoscalingAlgorithm=NONE.
root: INFO: 2018-01-07T11:33:29.805Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64af80): Checking required Cloud APIs are enabled.
root: INFO: 2018-01-07T11:33:30.588Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64a33b): Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2018-01-07T11:33:30.627Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64adf0): Expanding GroupByKey operations into optimizable parts.
root: INFO: 2018-01-07T11:33:30.655Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64afbe): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2018-01-07T11:33:30.678Z: JOB_MESSAGE_DEBUG: (dbc90c37ff64a35a): Annotating graph with Autotuner information.
root: INFO: 2018-01-07T11:33:30.801Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64afdd): Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2018-01-07T11:33:30.831Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64a1ab): Fusing consumer months with tornadoes into read
root: INFO: 2018-01-07T11:33:30.853Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64a379): Fusing consumer monthly count/GroupByKey/Reify into monthly count/GroupByKey+monthly count/Combine/Partial
root: INFO: 2018-01-07T11:33:30.874Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64a547): Fusing consumer format into monthly count/Combine/Extract
root: INFO: 2018-01-07T11:33:30.904Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64a715): Fusing consumer monthly count/Combine/Extract into monthly count/Combine
root: INFO: 2018-01-07T11:33:30.935Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64a8e3): Fusing consumer Write/WriteToBigQuery/NativeWrite into format
root: INFO: 2018-01-07T11:33:30.956Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64aab1): Fusing consumer monthly count/Combine into monthly count/GroupByKey/Read
root: INFO: 2018-01-07T11:33:30.980Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64ac7f): Fusing consumer monthly count/GroupByKey+monthly count/Combine/Partial into months with tornadoes
root: INFO: 2018-01-07T11:33:31.010Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64ae4d): Fusing consumer monthly count/GroupByKey/Write into monthly count/GroupByKey/Reify
root: INFO: 2018-01-07T11:33:31.038Z: JOB_MESSAGE_DEBUG: (dbc90c37ff64a01b): Workflow config is missing a default resource spec.
root: INFO: 2018-01-07T11:33:31.067Z: JOB_MESSAGE_DEBUG: (dbc90c37ff64a1e9): Adding StepResource setup and teardown to workflow graph.
root: INFO: 2018-01-07T11:33:31.098Z: JOB_MESSAGE_DEBUG: (dbc90c37ff64a3b7): Adding workflow start and stop steps.
root: INFO: 2018-01-07T11:33:31.124Z: JOB_MESSAGE_DEBUG: (dbc90c37ff64a585): Assigning stage ids.
root: INFO: Job 2018-01-07_03_33_27-11099320247198650492 is in state JOB_STATE_RUNNING
root: INFO: 2018-01-07T11:33:31.247Z: JOB_MESSAGE_DEBUG: (283da39b821315ce): Executing wait step start22
root: INFO: 2018-01-07T11:33:31.304Z: JOB_MESSAGE_BASIC: (283da39b8213180b): Executing operation monthly count/GroupByKey/Create
root: INFO: 2018-01-07T11:33:31.337Z: JOB_MESSAGE_DEBUG: (edd19cb0ff54e80d): Starting worker pool setup.
root: INFO: 2018-01-07T11:33:31.353Z: JOB_MESSAGE_BASIC: (edd19cb0ff54e8f3): Starting 1 workers in us-central1-f...
root: INFO: 2018-01-07T11:33:31.417Z: JOB_MESSAGE_DEBUG: (d3910428d1e201b3): Value "monthly count/GroupByKey/Session" materialized.
root: INFO: 2018-01-07T11:33:31.475Z: JOB_MESSAGE_BASIC: (283da39b82131c85): Executing operation read+months with tornadoes+monthly count/GroupByKey+monthly count/Combine/Partial+monthly count/GroupByKey/Reify+monthly count/GroupByKey/Write
root: INFO: 2018-01-07T11:33:32.133Z: JOB_MESSAGE_BASIC: (d1279e1044256977): BigQuery export job "dataflow_job_15071188470718621061" started. You can check its status with the bq tool: "bq show -j --project_id=clouddataflow-readonly dataflow_job_15071188470718621061".
root: INFO: 2018-01-07T11:33:39.981Z: JOB_MESSAGE_DETAILED: (30048e514e8d64b0): Autoscaling: Raised the number of workers to 0 based on the rate of progress in the currently running step(s).
root: INFO: 2018-01-07T11:34:02.654Z: JOB_MESSAGE_DETAILED: (a76243df36a9b3db): BigQuery export job progress: "dataflow_job_15071188470718621061" observed total of 1 exported files thus far.
root: INFO: 2018-01-07T11:34:02.680Z: JOB_MESSAGE_BASIC: (a76243df36a9b781): BigQuery export job finished: "dataflow_job_15071188470718621061"
root: INFO: 2018-01-07T11:34:19.458Z: JOB_MESSAGE_ERROR: (30048e514e8d6c67): Startup of the worker pool in zone us-central1-f failed to bring up any of the desired 1 workers. QUOTA_EXCEEDED: Quota 'DISKS_TOTAL_GB' exceeded.  Limit: 21000.0 in region us-central1.
root: INFO: 2018-01-07T11:34:19.489Z: JOB_MESSAGE_ERROR: (30048e514e8d6a59): Workflow failed.
root: INFO: 2018-01-07T11:34:19.715Z: JOB_MESSAGE_DETAILED: (dbc90c37ff64a5a4): Cleaning up.
root: INFO: 2018-01-07T11:34:19.766Z: JOB_MESSAGE_DEBUG: (dbc90c37ff64a940): Starting worker pool teardown.
root: INFO: 2018-01-07T11:34:19.790Z: JOB_MESSAGE_BASIC: (dbc90c37ff64ab0e): Stopping worker pool...
root: INFO: 2018-01-07T11:35:35.366Z: JOB_MESSAGE_DEBUG: (dbc90c37ff64a414): Tearing down pending resources...
root: INFO: Job 2018-01-07_03_33_27-11099320247198650492 is in state JOB_STATE_FAILED
root: INFO: Clean up a Bigquery table with project: apache-beam-testing, dataset: BigQueryTornadoesIT, table: monthly_tornadoes_1515324806232.
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
google_auth_httplib2: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
google_auth_httplib2: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
Ran 3 tests in 138.884s

FAILED (errors=4)
Build step 'Execute shell' marked build as failure
Not sending mail to unregistered user ehudm@google.com