Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2017/05/25 16:05:00 UTC

Build failed in Jenkins: beam_PostCommit_Python_Verify #2315

See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/2315/display/redirect>

------------------------------------------
[...truncated 576.69 KB...]
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:windowed_value", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "kind:global_window"
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16.output"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s14"
        }, 
        "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s17", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "_finalize_write"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {
          "SideInput-s15": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "SideInput-s15"
          }, 
          "SideInput-s16": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "SideInput-s16"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/Write/WriteImpl/FinalizeWrite.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "<string of 1056 bytes>", 
        "user_name": "write/Write/WriteImpl/FinalizeWrite/Do"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
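
For orientation, the step names in the job graph above and in the fusion messages below (read, split, pair_with_one, group, count, format, write) match the classic Beam Python wordcount. A minimal sketch of such a pipeline, with placeholder input/output paths, that would produce a job graph of this shape:

import re

import apache_beam as beam

with beam.Pipeline() as p:
    (p
     | 'read' >> beam.io.ReadFromText('gs://my-bucket/input.txt')
     | 'split' >> beam.FlatMap(lambda line: re.findall(r"[A-Za-z']+", line))
     | 'pair_with_one' >> beam.Map(lambda word: (word, 1))
     | 'group' >> beam.GroupByKey()
     | 'count' >> beam.Map(lambda kv: (kv[0], sum(kv[1])))
     | 'format' >> beam.Map(lambda kv: '%s: %d' % kv)
     | 'write' >> beam.io.WriteToText('gs://my-bucket/output'))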
root: INFO: Create job: <Job
 createTime: u'2017-05-25T15:59:18.434829Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2017-05-25_08_59_17-15727091547768182389'
 location: u'global'
 name: u'beamapp-jenkins-0525155916-528721'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2017-05-25_08_59_17-15727091547768182389]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2017-05-25_08_59_17-15727091547768182389
root: INFO: Job 2017-05-25_08_59_17-15727091547768182389 is in state JOB_STATE_RUNNING
root: INFO: 2017-05-25T15:59:17.847Z: JOB_MESSAGE_WARNING: (da41da6fdec55971): Setting the number of workers (1) disables autoscaling for this job. If you are trying to cap autoscaling, consider only setting max_num_workers. If you want to disable autoscaling altogether, the documented way is to explicitly use autoscalingAlgorithm=NONE.
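
The warning above distinguishes two worker-count configurations. A minimal sketch using the Beam worker options it names, with the numeric values as placeholders:

from apache_beam.options.pipeline_options import PipelineOptions

# Cap autoscaling at 10 workers but let the service scale within that bound:
capped = PipelineOptions(max_num_workers=10)

# Pin exactly one worker, with autoscaling explicitly disabled as the
# warning recommends:
pinned = PipelineOptions(num_workers=1, autoscaling_algorithm='NONE')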
root: INFO: 2017-05-25T15:59:19.991Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0a26): Checking required Cloud APIs are enabled.
root: INFO: 2017-05-25T15:59:21.027Z: JOB_MESSAGE_DEBUG: (b860003e1b5a0a51): Combiner lifting skipped for step write/Write/WriteImpl/GroupByKey: GroupByKey not followed by a combiner.
root: INFO: 2017-05-25T15:59:21.029Z: JOB_MESSAGE_DEBUG: (b860003e1b5a0e9b): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2017-05-25T15:59:21.031Z: JOB_MESSAGE_DETAILED: (b860003e1b5a02e5): Expanding GroupByKey operations into optimizable parts.
root: INFO: 2017-05-25T15:59:21.034Z: JOB_MESSAGE_DETAILED: (b860003e1b5a072f): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2017-05-25T15:59:21.041Z: JOB_MESSAGE_DEBUG: (b860003e1b5a040d): Annotating graph with Autotuner information.
root: INFO: 2017-05-25T15:59:21.053Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0ca1): Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2017-05-25T15:59:21.056Z: JOB_MESSAGE_DETAILED: (b860003e1b5a00eb): Fusing consumer split into read/Read
root: INFO: 2017-05-25T15:59:21.058Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0535): Fusing consumer group/Write into group/Reify
root: INFO: 2017-05-25T15:59:21.060Z: JOB_MESSAGE_DETAILED: (b860003e1b5a097f): Fusing consumer group/GroupByWindow into group/Read
root: INFO: 2017-05-25T15:59:21.063Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0dc9): Fusing consumer write/Write/WriteImpl/GroupByKey/GroupByWindow into write/Write/WriteImpl/GroupByKey/Read
root: INFO: 2017-05-25T15:59:21.066Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0213): Fusing consumer write/Write/WriteImpl/GroupByKey/Write into write/Write/WriteImpl/GroupByKey/Reify
root: INFO: 2017-05-25T15:59:21.072Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0aa7): Fusing consumer write/Write/WriteImpl/WindowInto(WindowIntoFn) into write/Write/WriteImpl/Pair
root: INFO: 2017-05-25T15:59:21.074Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0ef1): Fusing consumer write/Write/WriteImpl/GroupByKey/Reify into write/Write/WriteImpl/WindowInto(WindowIntoFn)
root: INFO: 2017-05-25T15:59:21.077Z: JOB_MESSAGE_DETAILED: (b860003e1b5a033b): Fusing consumer pair_with_one into split
root: INFO: 2017-05-25T15:59:21.079Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0785): Fusing consumer group/Reify into pair_with_one
root: INFO: 2017-05-25T15:59:21.081Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0bcf): Fusing consumer write/Write/WriteImpl/WriteBundles/Do into format
root: INFO: 2017-05-25T15:59:21.084Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0019): Fusing consumer write/Write/WriteImpl/Pair into write/Write/WriteImpl/WriteBundles/Do
root: INFO: 2017-05-25T15:59:21.086Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0463): Fusing consumer format into count
root: INFO: 2017-05-25T15:59:21.088Z: JOB_MESSAGE_DETAILED: (b860003e1b5a08ad): Fusing consumer write/Write/WriteImpl/Extract into write/Write/WriteImpl/GroupByKey/GroupByWindow
root: INFO: 2017-05-25T15:59:21.091Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0cf7): Fusing consumer count into group/GroupByWindow
root: INFO: 2017-05-25T15:59:21.100Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0e1f): Fusing consumer write/Write/WriteImpl/InitializeWrite into write/Write/WriteImpl/DoOnce/Read
root: INFO: 2017-05-25T15:59:21.175Z: JOB_MESSAGE_DEBUG: (b860003e1b5a0ecb): Workflow config is missing a default resource spec.
root: INFO: 2017-05-25T15:59:21.178Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0315): Adding StepResource setup and teardown to workflow graph.
root: INFO: 2017-05-25T15:59:21.180Z: JOB_MESSAGE_DEBUG: (b860003e1b5a075f): Adding workflow start and stop steps.
root: INFO: 2017-05-25T15:59:21.183Z: JOB_MESSAGE_DEBUG: (b860003e1b5a0ba9): Assigning stage ids.
root: INFO: 2017-05-25T15:59:21.223Z: JOB_MESSAGE_DEBUG: (3c6c761f21815370): Executing wait step start25
root: INFO: 2017-05-25T15:59:21.231Z: JOB_MESSAGE_BASIC: (3c6c761f2181586e): Executing operation write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
root: INFO: 2017-05-25T15:59:21.234Z: JOB_MESSAGE_BASIC: (741bbbb8c7e5509e): Executing operation group/Create
root: INFO: 2017-05-25T15:59:21.435Z: JOB_MESSAGE_DEBUG: (5b2b3e4c17869251): Starting worker pool setup.
root: INFO: 2017-05-25T15:59:21.437Z: JOB_MESSAGE_BASIC: (5b2b3e4c1786971f): Starting 1 workers...
root: INFO: 2017-05-25T15:59:21.449Z: JOB_MESSAGE_DEBUG: (741bbbb8c7e557cd): Value "group/Session" materialized.
root: INFO: 2017-05-25T15:59:21.458Z: JOB_MESSAGE_BASIC: (741bbbb8c7e55385): Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: 2017-05-25T16:00:22.421Z: JOB_MESSAGE_DETAILED: (d1d7a890d50c72d6): Workers have started successfully.
root: INFO: 2017-05-25T16:02:48.300Z: JOB_MESSAGE_ERROR: (7a3d72f03b4d4408): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos
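
The traceback above is the main-session round trip failing on the worker. A minimal sketch of the mechanism, with a placeholder file name; apache_beam.internal.pickler delegates to dill exactly as the stack shows:

import dill

# Submission side: --save_main_session pickles the __main__ namespace,
# recording imported modules by name rather than by value.
dill.dump_session('session.pkl')

# Worker side: restoring the session re-imports every recorded module via
# __import__; a module that existed only in the build workspace, like the
# SDK's build-time gen_protos helper here, raises exactly this ImportError.
dill.load_session('session.pkl')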

root: INFO: 2017-05-25T16:02:50.353Z: JOB_MESSAGE_ERROR: (7a3d72f03b4d4b40): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T16:02:52.407Z: JOB_MESSAGE_ERROR: (7a3d72f03b4d4278): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T16:02:54.460Z: JOB_MESSAGE_ERROR: (7a3d72f03b4d49b0): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T16:02:54.610Z: JOB_MESSAGE_DEBUG: (741bbbb8c7e551a2): Executing failure step failure24
root: INFO: 2017-05-25T16:02:54.640Z: JOB_MESSAGE_ERROR: (741bbbb8c7e55890): Workflow failed. Causes: (741bbbb8c7e553c6): S05:read/Read+split+pair_with_one+group/Reify+group/Write failed., (317ee513ba6bc569): Failed to split source.
root: INFO: 2017-05-25T16:02:54.721Z: JOB_MESSAGE_DETAILED: (b860003e1b5a0340): Cleaning up.
root: INFO: 2017-05-25T16:02:54.809Z: JOB_MESSAGE_DEBUG: (b860003e1b5a078a): Starting worker pool teardown.
root: INFO: 2017-05-25T16:02:54.811Z: JOB_MESSAGE_BASIC: (b860003e1b5a0bd4): Stopping worker pool...
root: INFO: 2017-05-25T16:04:09.828Z: JOB_MESSAGE_BASIC: (b860003e1b5a0243): Worker pool stopped.
root: INFO: 2017-05-25T16:04:09.866Z: JOB_MESSAGE_DEBUG: (b860003e1b5a036b): Tearing down pending resources...
root: INFO: Job 2017-05-25_08_59_17-15727091547768182389 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
Ran 2 tests in 343.955s

FAILED (errors=1)
Found: https://console.cloud.google.com/dataflow/job/2017-05-25_08_59_16-12120469644462099244?project=apache-beam-testing
Found: https://console.cloud.google.com/dataflow/job/2017-05-25_08_59_17-15727091547768182389?project=apache-beam-testing
Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : beam_PostCommit_Python_Verify #2320

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/2320/display/redirect>


Build failed in Jenkins: beam_PostCommit_Python_Verify #2319

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/2319/display/redirect>

------------------------------------------
Started by GitHub push by asfgit
[EnvInject] - Loading node environment variables.
Building remotely on beam3 (beam) in workspace <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/>
 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/*:refs/remotes/origin/pr/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 7bb10a6942d3d1859d93180d31a2f0ad5ee8a517 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 7bb10a6942d3d1859d93180d31a2f0ad5ee8a517
 > git rev-list a03a638e00d802a15c12671617e714a026c07552 # timeout=10
Cleaning workspace
 > git rev-parse --verify HEAD # timeout=10
Resetting working tree
 > git reset --hard # timeout=10
 > git clean -fdx # timeout=10
FATAL: Command "git clean -fdx" returned status code 128:
stdout: 
stderr: fatal: Not a git repository: sdks/python/.eggs/grpcio-wheels-build/grpcio/third_party/boringssl/../../.git/modules/third_party/boringssl

hudson.plugins.git.GitException: Command "git clean -fdx" returned status code 128:
stdout: 
stderr: fatal: Not a git repository: sdks/python/.eggs/grpcio-wheels-build/grpcio/third_party/boringssl/../../.git/modules/third_party/boringssl

	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1877)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1845)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1841)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommand(CliGitAPIImpl.java:1486)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommand(CliGitAPIImpl.java:1498)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.clean(CliGitAPIImpl.java:711)
	at hudson.plugins.git.GitAPI.clean(GitAPI.java:311)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at hudson.remoting.RemoteInvocationHandler$RPCRequest.perform(RemoteInvocationHandler.java:895)
	at hudson.remoting.RemoteInvocationHandler$RPCRequest.call(RemoteInvocationHandler.java:870)
	at hudson.remoting.RemoteInvocationHandler$RPCRequest.call(RemoteInvocationHandler.java:829)
	at hudson.remoting.UserRequest.perform(UserRequest.java:153)
	at hudson.remoting.UserRequest.perform(UserRequest.java:50)
	at hudson.remoting.Request$2.run(Request.java:336)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:68)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
	at ......remote call to beam3(Native Method)
	at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1545)
	at hudson.remoting.UserResponse.retrieve(UserRequest.java:253)
	at hudson.remoting.Channel.call(Channel.java:830)
	at hudson.remoting.RemoteInvocationHandler.invoke(RemoteInvocationHandler.java:257)
	at com.sun.proxy.$Proxy102.clean(Unknown Source)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl.clean(RemoteGitImpl.java:450)
	at hudson.plugins.git.extensions.impl.CleanCheckout.onCheckoutCompleted(CleanCheckout.java:28)
	at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1176)
	at hudson.scm.SCM.checkout(SCM.java:496)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1281)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:604)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:529)
	at hudson.model.Run.execute(Run.java:1728)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:98)
	at hudson.model.Executor.run(Executor.java:405)
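
The stderr above points at a stale gitlink: the grpcio wheel build left under sdks/python/.eggs contains a .git file whose referenced submodule store no longer exists, so git refuses to treat the tree as part of a repository and the clean aborts. A minimal sketch of a pre-clean that sidesteps this, run from the workspace root, with the path taken from the stderr:

import shutil
import subprocess

# Delete the stale egg build tree before git touches it.
shutil.rmtree('sdks/python/.eggs', ignore_errors=True)

# With the broken nested repository gone, the workspace clean succeeds.
subprocess.check_call(['git', 'clean', '-fdx'])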

Build failed in Jenkins: beam_PostCommit_Python_Verify #2318

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/2318/display/redirect>

------------------------------------------
Started by user robertwb
[EnvInject] - Loading node environment variables.
Building remotely on beam3 (beam) in workspace <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/>
 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/*:refs/remotes/origin/pr/*
 > git rev-parse 3b7eae4040eba65e8a6167eb802849273eca588b^{commit} # timeout=10
Checking out Revision 3b7eae4040eba65e8a6167eb802849273eca588b (detached)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 3b7eae4040eba65e8a6167eb802849273eca588b
 > git rev-list c79c5acd20e76685c8d427ef8d95b5b01cf92620 # timeout=10
Cleaning workspace
 > git rev-parse --verify HEAD # timeout=10
Resetting working tree
 > git reset --hard # timeout=10
 > git clean -fdx # timeout=10
FATAL: Command "git clean -fdx" returned status code 128:
stdout: 
stderr: fatal: Not a git repository: sdks/python/.eggs/grpcio-wheels-build/grpcio/third_party/boringssl/../../.git/modules/third_party/boringssl

hudson.plugins.git.GitException: Command "git clean -fdx" returned status code 128:
stdout: 
stderr: fatal: Not a git repository: sdks/python/.eggs/grpcio-wheels-build/grpcio/third_party/boringssl/../../.git/modules/third_party/boringssl

	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1877)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1845)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1841)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommand(CliGitAPIImpl.java:1486)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommand(CliGitAPIImpl.java:1498)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.clean(CliGitAPIImpl.java:711)
	at hudson.plugins.git.GitAPI.clean(GitAPI.java:311)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at hudson.remoting.RemoteInvocationHandler$RPCRequest.perform(RemoteInvocationHandler.java:895)
	at hudson.remoting.RemoteInvocationHandler$RPCRequest.call(RemoteInvocationHandler.java:870)
	at hudson.remoting.RemoteInvocationHandler$RPCRequest.call(RemoteInvocationHandler.java:829)
	at hudson.remoting.UserRequest.perform(UserRequest.java:153)
	at hudson.remoting.UserRequest.perform(UserRequest.java:50)
	at hudson.remoting.Request$2.run(Request.java:336)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:68)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
	at ......remote call to beam3(Native Method)
	at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1545)
	at hudson.remoting.UserResponse.retrieve(UserRequest.java:253)
	at hudson.remoting.Channel.call(Channel.java:830)
	at hudson.remoting.RemoteInvocationHandler.invoke(RemoteInvocationHandler.java:257)
	at com.sun.proxy.$Proxy102.clean(Unknown Source)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl.clean(RemoteGitImpl.java:450)
	at hudson.plugins.git.extensions.impl.CleanCheckout.onCheckoutCompleted(CleanCheckout.java:28)
	at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1176)
	at hudson.scm.SCM.checkout(SCM.java:496)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1281)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:604)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:529)
	at hudson.model.Run.execute(Run.java:1728)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:98)
	at hudson.model.Executor.run(Executor.java:405)

Build failed in Jenkins: beam_PostCommit_Python_Verify #2317

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/2317/display/redirect?page=changes>

Changes:

[robertwb] More robust gen_protos on jenkins.
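
A minimal sketch of the guarded-import pattern that "more robust gen_protos" suggests; the module paths and fallback call here are assumptions about intent, not the contents of the commit above:

try:
    from apache_beam.portability.api import beam_runner_api_pb2  # assumed target module
except ImportError:
    import gen_protos  # build-time helper in sdks/python, assumed importable here
    gen_protos.generate_proto_files()
    from apache_beam.portability.api import beam_runner_api_pb2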

------------------------------------------
[...truncated 576.70 KB...]
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:windowed_value", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "kind:global_window"
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16.output"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s14"
        }, 
        "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s17", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "_finalize_write"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {
          "SideInput-s15": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "SideInput-s15"
          }, 
          "SideInput-s16": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "SideInput-s16"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/Write/WriteImpl/FinalizeWrite.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "<string of 1056 bytes>", 
        "user_name": "write/Write/WriteImpl/FinalizeWrite/Do"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
 createTime: u'2017-05-25T20:33:52.834648Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2017-05-25_13_33_52-10908932325153537174'
 location: u'global'
 name: u'beamapp-jenkins-0525203351-247367'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2017-05-25_13_33_52-10908932325153537174]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2017-05-25_13_33_52-10908932325153537174
root: INFO: Job 2017-05-25_13_33_52-10908932325153537174 is in state JOB_STATE_RUNNING
root: INFO: 2017-05-25T20:33:52.299Z: JOB_MESSAGE_WARNING: (97645026d78d7dd2): Setting the number of workers (1) disables autoscaling for this job. If you are trying to cap autoscaling, consider only setting max_num_workers. If you want to disable autoscaling altogether, the documented way is to explicitly use autoscalingAlgorithm=NONE.
root: INFO: 2017-05-25T20:33:54.443Z: JOB_MESSAGE_DETAILED: (fde36659798150df): Checking required Cloud APIs are enabled.
root: INFO: 2017-05-25T20:33:55.459Z: JOB_MESSAGE_DEBUG: (fde366597981550e): Combiner lifting skipped for step write/Write/WriteImpl/GroupByKey: GroupByKey not followed by a combiner.
root: INFO: 2017-05-25T20:33:55.461Z: JOB_MESSAGE_DEBUG: (fde36659798158d0): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2017-05-25T20:33:55.464Z: JOB_MESSAGE_DETAILED: (fde3665979815c92): Expanding GroupByKey operations into optimizable parts.
root: INFO: 2017-05-25T20:33:55.468Z: JOB_MESSAGE_DETAILED: (fde3665979815054): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2017-05-25T20:33:55.476Z: JOB_MESSAGE_DEBUG: (fde3665979815b9a): Annotating graph with Autotuner information.
root: INFO: 2017-05-25T20:33:55.490Z: JOB_MESSAGE_DETAILED: (fde366597981531e): Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2017-05-25T20:33:55.493Z: JOB_MESSAGE_DETAILED: (fde36659798156e0): Fusing consumer split into read/Read
root: INFO: 2017-05-25T20:33:55.495Z: JOB_MESSAGE_DETAILED: (fde3665979815aa2): Fusing consumer group/Write into group/Reify
root: INFO: 2017-05-25T20:33:55.498Z: JOB_MESSAGE_DETAILED: (fde3665979815e64): Fusing consumer group/GroupByWindow into group/Read
root: INFO: 2017-05-25T20:33:55.502Z: JOB_MESSAGE_DETAILED: (fde3665979815226): Fusing consumer write/Write/WriteImpl/GroupByKey/GroupByWindow into write/Write/WriteImpl/GroupByKey/Read
root: INFO: 2017-05-25T20:33:55.504Z: JOB_MESSAGE_DETAILED: (fde36659798155e8): Fusing consumer write/Write/WriteImpl/GroupByKey/Write into write/Write/WriteImpl/GroupByKey/Reify
root: INFO: 2017-05-25T20:33:55.509Z: JOB_MESSAGE_DETAILED: (fde3665979815d6c): Fusing consumer write/Write/WriteImpl/WindowInto(WindowIntoFn) into write/Write/WriteImpl/Pair
root: INFO: 2017-05-25T20:33:55.512Z: JOB_MESSAGE_DETAILED: (fde366597981512e): Fusing consumer write/Write/WriteImpl/GroupByKey/Reify into write/Write/WriteImpl/WindowInto(WindowIntoFn)
root: INFO: 2017-05-25T20:33:55.516Z: JOB_MESSAGE_DETAILED: (fde36659798154f0): Fusing consumer pair_with_one into split
root: INFO: 2017-05-25T20:33:55.518Z: JOB_MESSAGE_DETAILED: (fde36659798158b2): Fusing consumer group/Reify into pair_with_one
root: INFO: 2017-05-25T20:33:55.521Z: JOB_MESSAGE_DETAILED: (fde3665979815c74): Fusing consumer write/Write/WriteImpl/WriteBundles/Do into format
root: INFO: 2017-05-25T20:33:55.526Z: JOB_MESSAGE_DETAILED: (fde3665979815036): Fusing consumer write/Write/WriteImpl/Pair into write/Write/WriteImpl/WriteBundles/Do
root: INFO: 2017-05-25T20:33:55.529Z: JOB_MESSAGE_DETAILED: (fde36659798153f8): Fusing consumer format into count
root: INFO: 2017-05-25T20:33:55.532Z: JOB_MESSAGE_DETAILED: (fde36659798157ba): Fusing consumer write/Write/WriteImpl/Extract into write/Write/WriteImpl/GroupByKey/GroupByWindow
root: INFO: 2017-05-25T20:33:55.535Z: JOB_MESSAGE_DETAILED: (fde3665979815b7c): Fusing consumer count into group/GroupByWindow
root: INFO: 2017-05-25T20:33:55.549Z: JOB_MESSAGE_DETAILED: (fde3665979815a84): Fusing consumer write/Write/WriteImpl/InitializeWrite into write/Write/WriteImpl/DoOnce/Read
root: INFO: 2017-05-25T20:33:55.643Z: JOB_MESSAGE_DEBUG: (fde3665979815b40): Workflow config is missing a default resource spec.
root: INFO: 2017-05-25T20:33:55.647Z: JOB_MESSAGE_DETAILED: (fde3665979815f02): Adding StepResource setup and teardown to workflow graph.
root: INFO: 2017-05-25T20:33:55.651Z: JOB_MESSAGE_DEBUG: (fde36659798152c4): Adding workflow start and stop steps.
root: INFO: 2017-05-25T20:33:55.654Z: JOB_MESSAGE_DEBUG: (fde3665979815686): Assigning stage ids.
root: INFO: 2017-05-25T20:33:55.698Z: JOB_MESSAGE_DEBUG: (e0f7eccdcc6dc18): Executing wait step start25
root: INFO: 2017-05-25T20:33:55.711Z: JOB_MESSAGE_BASIC: (e0f7eccdcc6d3fa): Executing operation write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
root: INFO: 2017-05-25T20:33:55.716Z: JOB_MESSAGE_BASIC: (5649c2f38b72d0fb): Executing operation group/Create
root: INFO: 2017-05-25T20:33:55.918Z: JOB_MESSAGE_DEBUG: (e463699c10aeac4f): Starting worker pool setup.
root: INFO: 2017-05-25T20:33:55.921Z: JOB_MESSAGE_BASIC: (e463699c10aea855): Starting 1 workers...
root: INFO: 2017-05-25T20:33:55.941Z: JOB_MESSAGE_DEBUG: (5649c2f38b72dac4): Value "group/Session" materialized.
root: INFO: 2017-05-25T20:33:55.952Z: JOB_MESSAGE_BASIC: (5649c2f38b72d8cc): Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: 2017-05-25T20:36:47.990Z: JOB_MESSAGE_DETAILED: (ec660d236c030370): Workers have started successfully.
root: INFO: 2017-05-25T20:38:35.245Z: JOB_MESSAGE_ERROR: (9eea00070bd5cecd): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T20:38:37.315Z: JOB_MESSAGE_ERROR: (9eea00070bd5cf65): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T20:38:39.377Z: JOB_MESSAGE_ERROR: (9eea00070bd5cffd): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T20:38:41.442Z: JOB_MESSAGE_ERROR: (9eea00070bd5c095): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T20:38:42.686Z: JOB_MESSAGE_DEBUG: (e0f7eccdcc6d705): Executing failure step failure24
root: INFO: 2017-05-25T20:38:42.689Z: JOB_MESSAGE_ERROR: (e0f7eccdcc6defb): Workflow failed. Causes: (e0f7eccdcc6d719): S01:write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite failed., (134af2bc2c846c94): Failed to split source.
root: INFO: 2017-05-25T20:38:42.763Z: JOB_MESSAGE_DETAILED: (fde3665979815331): Cleaning up.
root: INFO: 2017-05-25T20:38:42.769Z: JOB_MESSAGE_DEBUG: (fde36659798156f3): Starting worker pool teardown.
root: INFO: 2017-05-25T20:38:42.774Z: JOB_MESSAGE_BASIC: (fde3665979815ab5): Stopping worker pool...
root: INFO: 2017-05-25T20:40:02.786Z: JOB_MESSAGE_BASIC: (fde3665979815858): Worker pool stopped.
root: INFO: 2017-05-25T20:40:02.831Z: JOB_MESSAGE_DEBUG: (fde3665979815760): Tearing down pending resources...
root: INFO: Job 2017-05-25_13_33_52-10908932325153537174 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
Ran 2 tests in 377.787s

FAILED (errors=1)
Found: https://console.cloud.google.com/dataflow/job/2017-05-25_13_33_52-10908932325153537174?project=apache-beam-testing
Found: https://console.cloud.google.com/dataflow/job/2017-05-25_13_33_52-12774029545839592167?project=apache-beam-testing
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: beam_PostCommit_Python_Verify #2316

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/2316/display/redirect?page=changes>

Changes:

[iemejia] Update maven-dependency-plugin to version 3.0.1

------------------------------------------
[...truncated 576.03 KB...]
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:windowed_value", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }, 
                    {
                      "@type": "kind:global_window"
                    }
                  ], 
                  "is_wrapper": true
                }
              ]
            }, 
            "output_name": "out", 
            "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16.output"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s14"
        }, 
        "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s17", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "_finalize_write"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {
          "SideInput-s15": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "SideInput-s15"
          }, 
          "SideInput-s16": {
            "@type": "OutputReference", 
            "output_name": "out", 
            "step_name": "SideInput-s16"
          }
        }, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "write/Write/WriteImpl/FinalizeWrite.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "<string of 1056 bytes>", 
        "user_name": "write/Write/WriteImpl/FinalizeWrite/Do"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
 createTime: u'2017-05-25T17:56:08.949185Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2017-05-25_10_56_08-15254750624546520082'
 location: u'global'
 name: u'beamapp-jenkins-0525175607-304078'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2017-05-25_10_56_08-15254750624546520082]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2017-05-25_10_56_08-15254750624546520082
root: INFO: Job 2017-05-25_10_56_08-15254750624546520082 is in state JOB_STATE_RUNNING
root: INFO: 2017-05-25T17:56:08.374Z: JOB_MESSAGE_WARNING: (d3b3c2deb1437c0e): Setting the number of workers (1) disables autoscaling for this job. If you are trying to cap autoscaling, consider only setting max_num_workers. If you want to disable autoscaling altogether, the documented way is to explicitly use autoscalingAlgorithm=NONE.
root: INFO: 2017-05-25T17:56:10.568Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d068): Checking required Cloud APIs are enabled.
root: INFO: 2017-05-25T17:56:11.791Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1de57): Combiner lifting skipped for step write/Write/WriteImpl/GroupByKey: GroupByKey not followed by a combiner.
root: INFO: 2017-05-25T17:56:11.793Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d699): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2017-05-25T17:56:11.796Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dedb): Expanding GroupByKey operations into optimizable parts.
root: INFO: 2017-05-25T17:56:11.798Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d71d): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2017-05-25T17:56:11.804Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1dfe3): Annotating graph with Autotuner information.
root: INFO: 2017-05-25T17:56:11.817Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d067): Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2017-05-25T17:56:11.819Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d8a9): Fusing consumer split into read/Read
root: INFO: 2017-05-25T17:56:11.821Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d0eb): Fusing consumer group/Write into group/Reify
root: INFO: 2017-05-25T17:56:11.823Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d92d): Fusing consumer group/GroupByWindow into group/Read
root: INFO: 2017-05-25T17:56:11.825Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d16f): Fusing consumer write/Write/WriteImpl/GroupByKey/GroupByWindow into write/Write/WriteImpl/GroupByKey/Read
root: INFO: 2017-05-25T17:56:11.827Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d9b1): Fusing consumer write/Write/WriteImpl/GroupByKey/Write into write/Write/WriteImpl/GroupByKey/Reify
root: INFO: 2017-05-25T17:56:11.832Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1da35): Fusing consumer write/Write/WriteImpl/WindowInto(WindowIntoFn) into write/Write/WriteImpl/Pair
root: INFO: 2017-05-25T17:56:11.834Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d277): Fusing consumer write/Write/WriteImpl/GroupByKey/Reify into write/Write/WriteImpl/WindowInto(WindowIntoFn)
root: INFO: 2017-05-25T17:56:11.836Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dab9): Fusing consumer pair_with_one into split
root: INFO: 2017-05-25T17:56:11.838Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d2fb): Fusing consumer group/Reify into pair_with_one
root: INFO: 2017-05-25T17:56:11.840Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1db3d): Fusing consumer write/Write/WriteImpl/WriteBundles/Do into format
root: INFO: 2017-05-25T17:56:11.842Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d37f): Fusing consumer write/Write/WriteImpl/Pair into write/Write/WriteImpl/WriteBundles/Do
root: INFO: 2017-05-25T17:56:11.845Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dbc1): Fusing consumer format into count
root: INFO: 2017-05-25T17:56:11.847Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d403): Fusing consumer write/Write/WriteImpl/Extract into write/Write/WriteImpl/GroupByKey/GroupByWindow
root: INFO: 2017-05-25T17:56:11.850Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dc45): Fusing consumer count into group/GroupByWindow
root: INFO: 2017-05-25T17:56:11.859Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dd4d): Fusing consumer write/Write/WriteImpl/InitializeWrite into write/Write/WriteImpl/DoOnce/Read
root: INFO: 2017-05-25T17:56:11.931Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d509): Workflow config is missing a default resource spec.
root: INFO: 2017-05-25T17:56:11.935Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dd4b): Adding StepResource setup and teardown to workflow graph.
root: INFO: 2017-05-25T17:56:11.938Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d58d): Adding workflow start and stop steps.
root: INFO: 2017-05-25T17:56:11.942Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1ddcf): Assigning stage ids.
root: INFO: 2017-05-25T17:56:11.982Z: JOB_MESSAGE_DEBUG: (7a4c9a7af0e47d65): Executing wait step start25
root: INFO: 2017-05-25T17:56:11.992Z: JOB_MESSAGE_BASIC: (7a4c9a7af0e47353): Executing operation write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
root: INFO: 2017-05-25T17:56:11.995Z: JOB_MESSAGE_BASIC: (ce3f979b8433dae4): Executing operation group/Create
root: INFO: 2017-05-25T17:56:12.197Z: JOB_MESSAGE_DEBUG: (43e85710b9452efe): Starting worker pool setup.
root: INFO: 2017-05-25T17:56:12.199Z: JOB_MESSAGE_BASIC: (43e85710b9452694): Starting 1 workers...
root: INFO: 2017-05-25T17:56:12.213Z: JOB_MESSAGE_DEBUG: (ce3f979b8433dc81): Value "group/Session" materialized.
root: INFO: 2017-05-25T17:56:12.224Z: JOB_MESSAGE_BASIC: (ce3f979b8433dc29): Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: 2017-05-25T17:58:27.159Z: JOB_MESSAGE_DETAILED: (16fafad61e9ffc7a): Workers have started successfully.
root: INFO: 2017-05-25T17:59:40.820Z: JOB_MESSAGE_ERROR: (8bb4a045e88fffdf): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T17:59:42.923Z: JOB_MESSAGE_ERROR: (8bb4a045e88ff147): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T17:59:44.985Z: JOB_MESSAGE_ERROR: (8bb4a045e88ff2af): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T17:59:47.040Z: JOB_MESSAGE_ERROR: (8bb4a045e88ff417): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos

root: INFO: 2017-05-25T17:59:47.431Z: JOB_MESSAGE_DEBUG: (7a4c9a7af0e47bc0): Executing failure step failure24
root: INFO: 2017-05-25T17:59:47.434Z: JOB_MESSAGE_ERROR: (7a4c9a7af0e47dba): Workflow failed. Causes: (7a4c9a7af0e477cc): S01:write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite failed., (bf54651899a3d481): Failed to split source.
root: INFO: 2017-05-25T17:59:47.508Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1db3a): Cleaning up.
root: INFO: 2017-05-25T17:59:47.511Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d37c): Starting worker pool teardown.
root: INFO: 2017-05-25T17:59:47.514Z: JOB_MESSAGE_BASIC: (96fbd1da3af1dbbe): Stopping worker pool...
root: INFO: 2017-05-25T18:01:07.524Z: JOB_MESSAGE_BASIC: (96fbd1da3af1d821): Worker pool stopped.
root: INFO: 2017-05-25T18:01:07.595Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d929): Tearing down pending resources...
root: INFO: Job 2017-05-25_10_56_08-15254750624546520082 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
Ran 2 tests in 393.584s

FAILED (errors=1)
Found: https://console.cloud.google.com/dataflow/job/2017-05-25_10_56_07-14183265070979746962?project=apache-beam-testing
Found: https://console.cloud.google.com/dataflow/job/2017-05-25_10_56_08-15254750624546520082?project=apache-beam-testing
Build step 'Execute shell' marked build as failure