Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/10/25 18:45:35 UTC

Build failed in Jenkins: beam_PostCommit_Py_VR_Dataflow #4904

See <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/4904/display/redirect?page=changes>

Changes:

[lostluck] [Go SDK] Propagate data channel failures.

[lostluck] [Go SDK] Simplify datamgr tests.


------------------------------------------
[...truncated 204.35 KB...]
            "location": "storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1025181944-524100.1572027584.524383/pickled_main_session",
            "name": "pickled_main_session"
          },
          {
            "location": "storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1025181944-524100.1572027584.524383/dataflow_python_sdk.tar",
            "name": "dataflow_python_sdk.tar"
          },
          {
            "location": "storage.googleapis.com/temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1025181944-524100.1572027584.524383/dataflow-worker.jar",
            "name": "dataflow-worker.jar"
          }
        ],
        "taskrunnerSettings": {
          "parallelWorkerSettings": {
            "baseUrl": "https://dataflow.googleapis.com",
            "servicePath": "https://dataflow.googleapis.com"
          }
        },
        "workerHarnessContainerImage": "gcr.io/cloud-dataflow/v1beta3/python3-fnapi:beam-master-20191010"
      }
    ]
  },
  "name": "beamapp-jenkins-1025181944-524100",
  "steps": [
    {
      "kind": "ParallelRead",
      "name": "s1",
      "properties": {
        "display_data": [
          {
            "key": "source",
            "label": "Read Source",
            "namespace": "apache_beam.io.iobase.Read",
            "shortValue": "_PubSubSource",
            "type": "STRING",
            "value": "apache_beam.io.gcp.pubsub._PubSubSource"
          },
          {
            "key": "subscription",
            "label": "Pubsub Subscription",
            "namespace": "apache_beam.io.gcp.pubsub._PubSubSource",
            "type": "STRING",
            "value": "projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_inputba614ed6-e354-47fa-8750-0f79306072bf"
          },
          {
            "key": "with_attributes",
            "label": "With Attributes",
            "namespace": "apache_beam.io.gcp.pubsub._PubSubSource",
            "type": "BOOLEAN",
            "value": false
          }
        ],
        "format": "pubsub",
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "ReadFromPubSub/Read.out"
          }
        ],
        "pubsub_subscription": "projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_inputba614ed6-e354-47fa-8750-0f79306072bf",
        "user_name": "ReadFromPubSub/Read"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s2",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "StreamingUserMetricsDoFn",
            "type": "STRING",
            "value": "apache_beam.runners.dataflow.dataflow_exercise_streaming_metrics_pipeline.StreamingUserMetricsDoFn"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "generate_metrics.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s1"
        },
        "serialized_fn": "ref_AppliedPTransform_generate_metrics_4",
        "user_name": "generate_metrics"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s3",
      "properties": {
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "kind:bytes"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "pubsub",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s2"
        },
        "pubsub_topic": "projects/apache-beam-testing/topics/exercise_streaming_metrics_topic_outputba614ed6-e354-47fa-8750-0f79306072bf",
        "user_name": "dump_to_pub/Write/NativeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
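For context, the job graph above encodes a three-step streaming pipeline: a Pub/Sub read (s1), a metrics-emitting ParDo (s2, generate_metrics), and a Pub/Sub write (s3, dump_to_pub). A minimal Python SDK sketch of that shape follows; only the step labels come from the graph, while the DoFn body, the counter name, and the subscription/topic placeholders are assumptions, not the actual test source.

# Minimal sketch (not the actual test source) of the pipeline shape described by
# the job graph above: Pub/Sub read -> metrics-emitting ParDo -> Pub/Sub write.
# The subscription/topic strings and the counter name are placeholders.
import apache_beam as beam
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import PipelineOptions, StandardOptions


class StreamingUserMetricsDoFn(beam.DoFn):
  """Hypothetical DoFn that bumps a user counter for every element it sees."""

  def __init__(self):
    self.element_counter = Metrics.counter(self.__class__, 'elements_seen')

  def process(self, element):
    self.element_counter.inc()
    yield element


def run(argv=None):
  options = PipelineOptions(argv)
  options.view_as(StandardOptions).streaming = True
  with beam.Pipeline(options=options) as p:
    _ = (p
         | 'ReadFromPubSub' >> beam.io.ReadFromPubSub(
             subscription='projects/<project>/subscriptions/<input-subscription>')
         | 'generate_metrics' >> beam.ParDo(StreamingUserMetricsDoFn())
         | 'dump_to_pub' >> beam.io.WriteToPubSub(
             topic='projects/<project>/topics/<output-topic>'))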
root: INFO: Create job: <Job
 createTime: '2019-10-25T18:22:04.326878Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2019-10-25_11_22_02-3245440077188862444'
 location: 'us-central1'
 name: 'beamapp-jenkins-1025181944-524100'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2019-10-25T18:22:04.326878Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
root: INFO: Created job with id: [2019-10-25_11_22_02-3245440077188862444]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_02-3245440077188862444?project=apache-beam-testing
root: INFO: Job 2019-10-25_11_22_02-3245440077188862444 is in state JOB_STATE_RUNNING
root: INFO: 2019-10-25T18:22:06.755Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2019-10-25T18:22:07.419Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-f.
root: INFO: 2019-10-25T18:22:07.999Z: JOB_MESSAGE_DETAILED: Expanding SplittableParDo operations into optimizable parts.
root: INFO: 2019-10-25T18:22:08.002Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into optimizable parts.
root: INFO: 2019-10-25T18:22:08.010Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2019-10-25T18:22:08.022Z: JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into optimizable parts.
root: INFO: 2019-10-25T18:22:08.025Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write steps
root: INFO: 2019-10-25T18:22:08.031Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
root: INFO: 2019-10-25T18:22:08.050Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2019-10-25T18:22:08.052Z: JOB_MESSAGE_DETAILED: Fusing consumer generate_metrics into ReadFromPubSub/Read
root: INFO: 2019-10-25T18:22:08.055Z: JOB_MESSAGE_DETAILED: Fusing consumer dump_to_pub/Write/NativeWrite into generate_metrics
root: INFO: 2019-10-25T18:22:08.064Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
root: INFO: 2019-10-25T18:22:08.103Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
root: INFO: 2019-10-25T18:22:08.115Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-10-25T18:22:08.260Z: JOB_MESSAGE_DEBUG: Executing wait step start2
root: INFO: 2019-10-25T18:22:08.272Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
root: INFO: 2019-10-25T18:22:08.276Z: JOB_MESSAGE_BASIC: Starting 1 workers...
root: INFO: 2019-10-25T18:22:11.914Z: JOB_MESSAGE_BASIC: Executing operation ReadFromPubSub/Read+generate_metrics+dump_to_pub/Write/NativeWrite
root: INFO: 2019-10-25T18:22:37.442Z: JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric descriptors and Stackdriver will not create new Dataflow custom metrics for this job. Each unique user-defined metric name (independent of the DoFn in which it is defined) produces a new metric descriptor. To delete old / unused metric descriptors see https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
root: INFO: 2019-10-25T18:22:39.051Z: JOB_MESSAGE_DEBUG: Executing input step topology_init_attach_disk_input_step
root: INFO: 2019-10-25T18:22:39.051Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2019-10-25T18:22:39.798Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-f.
root: INFO: 2019-10-25T18:22:50.331Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
root: WARNING: Timing out on waiting for job 2019-10-25_11_22_02-3245440077188862444 after 61 seconds
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
--------------------- >> end captured logging << ---------------------
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_02-3245440077188862444?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_29_41-77363104164224219?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_37_48-9448646912262246256?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_01-17400920031260495807?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_29_44-6281080957860436121?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_08-841877224095369460?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_32_11-9097244419600983372?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_04-14443758797885380107?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_30_52-15201269282973104227?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_11-17493148254853310799?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_32_14-3296201393264149470?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_17-16037989632610654095?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_31_55-17613352741960983643?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_09-14392441184669551776?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_30_39-11581084055911043824?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_22_09-13840983184550339585?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_11_32_07-2269758629265624986?project=apache-beam-testing

----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df-py35.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 17 tests in 1572.361s

FAILED (failures=1)

> Task :sdks:python:test-suites:dataflow:py35:validatesRunnerStreamingTests FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/test-suites/dataflow/py35/build.gradle>' line: 101

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py35:validatesRunnerStreamingTests'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 55m 8s
74 actionable tasks: 57 executed, 17 from cache

Publishing build scan...
https://gradle.com/s/ngzpmjzp5phje

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PostCommit_Py_VR_Dataflow #4907

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/4907/display/redirect?page=changes>


---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Py_VR_Dataflow #4906

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/4906/display/redirect?page=changes>

Changes:

[kenn] [BEAM-8456] Add pipeline option to have Data Catalog truncate

[kenn] Inline Data Catalog TableUtils to its only use

[kenn] Make BigQueryUtils, PubsubUtils, GcsUtils implementations of

[lostluck] In Go, base counter names on PTransform unique_name


------------------------------------------
[...truncated 257.44 KB...]
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "assert:even/Unkey.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s34"
        },
        "serialized_fn": "ref_AppliedPTransform_assert:even/Unkey_47",
        "user_name": "assert:even/Unkey"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s36",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "_equal"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "assert:even/Match.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s35"
        },
        "serialized_fn": "ref_AppliedPTransform_assert:even/Match_48",
        "user_name": "assert:even/Match"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
--------------------- >> end captured logging << ---------------------
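The step names "assert:even/Unkey" and "assert:even/Match" in the truncated job graph above are what Beam's assert_that testing utility expands into, and the _equal CallableWrapperDoFn shown in the display data is the matcher produced by equal_to. A rough, illustrative sketch of a pipeline assertion that yields such step names (the data and label here are placeholders, not the failing test):

# Illustrative only: how step names like "assert:even/Unkey" and "assert:even/Match"
# arise. assert_that(..., label=...) expands into Group/Unkey/Match stages, and the
# Match stage wraps the matcher callable from equal_to (the `_equal` function seen
# in the display data above). The pipeline contents are placeholders.
import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

with TestPipeline() as p:
  evens = (p
           | beam.Create([1, 2, 3, 4])
           | beam.Filter(lambda x: x % 2 == 0))
  assert_that(evens, equal_to([2, 4]), label='assert:even')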
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_32_03-3534197668160839353?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_40_21-8298192497990019717?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_57-15751892311275125500?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_40_16-15132691085339751395?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_55-7008781251899431573?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_41_01-7922301397552588116?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_55-5176884905516752204?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_40_21-2993883387758624804?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_32_28-1499316927335475093?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_40_51-15577013263626981234?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_56-13850643872426641447?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_39_45-13410849295597912238?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_58-15360182855948055110?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_39_49-408937910471752682?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_59-1995663374099097412?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_40_18-2640060783718095810?project=apache-beam-testing

----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df-py35.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 17 tests in 1046.666s

FAILED (errors=1)

> Task :sdks:python:test-suites:dataflow:py35:validatesRunnerStreamingTests FAILED

> Task :sdks:python:test-suites:dataflow:py37:validatesRunnerStreamingTests
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_33-5508180408056138333?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_13-8234486428997763276?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_25-7142482963627584670?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_27-4991306067131599037?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_25-14207338812022479990?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_13-11369662744571667586?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_25-15985094340690440555?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_10-2345653887387607232?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_25-3660564085306565171?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_34-17799680037906434138?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_26-6329172593872314064?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_14-7723128901141888989?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_25-17986544228197775456?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_39_13-14855372519974397010?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_30_25-3508197512948730434?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_19-15904915042329997373?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_45_27-8773156340793054351?project=apache-beam-testing
test_impulse (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_flatten_multiple_pcollections_having_multiple_consumers (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_par_do_with_multiple_outputs_and_using_return (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_undeclared_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_dofn_lifecycle (apache_beam.transforms.dofn_lifecycle_test.DoFnLifecycleTest) ... ok
test_multiple_empty_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_yield (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_as_singleton_without_unique_labels (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_with_different_defaults (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_empty_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_and_as_dict_side_inputs (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_dict_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_default_value_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_flattened_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_iterable_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok

----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df-py37.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 17 tests in 1389.692s

OK

> Task :sdks:python:test-suites:dataflow:py36:validatesRunnerStreamingTests
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_17-1759414162396630327?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_15-11401786791416858851?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_46_43-7303900075862542210?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_07-9926585598632850601?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_39_15-10931144595255605899?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_09-7231421753194234679?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_48-15173241985638874521?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_08-14898162187050477088?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_39_01-911455913415074429?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_09-143478715513339320?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_49-1684845187655313542?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_09-15569956532881364658?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_39_02-5260872071127555730?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_09-2163483424955551375?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_40_04-4306755316638495119?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_31_10-12976066704436893046?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_14_38_47-1600663789635340256?project=apache-beam-testing
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_flatten_multiple_pcollections_having_multiple_consumers (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_undeclared_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_multiple_empty_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_impulse (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_return (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_dofn_lifecycle (apache_beam.transforms.dofn_lifecycle_test.DoFnLifecycleTest) ... ok
test_par_do_with_multiple_outputs_and_using_yield (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_as_dict_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_without_unique_labels (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_and_as_dict_side_inputs (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_with_different_defaults (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_default_value_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_empty_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_flattened_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_iterable_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok

----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 17 tests in 1430.683s

OK

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:installGcpTest'.
> Process 'command 'sh'' finished with non-zero exit value 2

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/test-suites/dataflow/py35/build.gradle>' line: 101

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py35:validatesRunnerStreamingTests'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 51m 2s
72 actionable tasks: 55 executed, 17 from cache

Publishing build scan...
https://gradle.com/s/b6kcbxpqjkqg4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Py_VR_Dataflow #4905

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/4905/display/redirect?page=changes>

Changes:

[chadrik] [BEAM-7738] Add external transform support to PubsubIO

[chadrik] Adapt io.external.pubsub reader and writer to new API

[chadrik] Adjust the way that the new API is used to handle the encoded data

[chadrik] Must use Boolean arg to make use of the BooleanCoder in Java

[chadrik] Fix docs

[chadrik] Mark kafka and pubsub external transforms as experimental

[bhulette] Combine serializer and deserializer

[bhulette] Pass along exception

[bhulette] expose of method

[bhulette] fixup!

[bhulette] Merge incorrectly split strings


------------------------------------------
[...truncated 127.70 KB...]
writing requirements to apache_beam.egg-info/requires.txt
writing top-level names to apache_beam.egg-info/top_level.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/build/gradleenv/-1734967054/lib/python3.5/site-packages/setuptools/dist.py>:475: UserWarning: Normalizing '2.18.0.dev' to '2.18.0.dev0'
  normalized_version,
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio.py>:59: UserWarning: Datastore IO will support Python 3 after replacing googledatastore by google-cloud-datastore, see: BEAM-4543.
  warnings.warn('Datastore IO will support Python 3 after replacing '
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/apache_beam/io/vcfio.py>:47: UserWarning: VCF IO will support Python 3 after migration to Nucleus, see: BEAM-5628.
  warnings.warn("VCF IO will support Python 3 after migration to Nucleus, "
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/apache_beam/transforms/trigger_test.py>:509: YAMLLoadWarning: calling yaml.load_all() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  for spec in yaml.load_all(open(transcript_filename)):

> Task :sdks:python:test-suites:dataflow:py36:validatesRunnerBatchTests
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_41-16989224414865609666?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_51_30-10803779485961822285?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_59_32-2309976599079519435?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_36-3793429355163529886?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_51_59-18026449350084989759?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_37-10438880809528769376?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_51_59-2343945612167459810?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_36-4339449772742423742?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_51_23-5058653616974585047?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_59_31-12755320628569783998?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_37-17952491121718842396?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_52_20-7707514663640409131?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_37-16850152286926114602?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_53_16-8032941980000306007?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_37-14316538743603578186?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_53_25-10505192501500666393?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_43_35-1693575571687326564?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_12_50_58-3235103308737129844?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_00_04-13650670241554036383?project=apache-beam-testing
test_read_metrics (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_impulse (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_dofn_lifecycle (apache_beam.transforms.dofn_lifecycle_test.DoFnLifecycleTest) ... ok
test_flatten_multiple_pcollections_having_multiple_consumers (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_multiple_empty_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_return (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_yield (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_as_dict_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_and_as_dict_side_inputs (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_undeclared_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_as_list_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_with_different_defaults (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_without_unique_labels (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_empty_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_default_value_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_flattened_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_iterable_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_multi_valued_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok

----------------------------------------------------------------------
XML: nosetests-validatesRunnerBatchTests-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 19 tests in 1523.974s

OK

> Task :sdks:python:test-suites:dataflow:py36:validatesRunnerStreamingTests
>>> RUNNING integration tests with pipeline options: --runner=TestDataflowRunner --project=apache-beam-testing --staging_location=gs://temp-storage-for-end-to-end-tests/staging-it --temp_location=gs://temp-storage-for-end-to-end-tests/temp-it --output=gs://temp-storage-for-end-to-end-tests/py-it-cloud/output --sdk_location=<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/build/apache-beam.tar.gz> --requirements_file=postcommit_requirements.txt --num_workers=1 --sleep_secs=20 --streaming --dataflow_worker_jar=<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/runners/google-cloud-dataflow-java/worker/build/libs/beam-runners-google-cloud-dataflow-java-fn-api-worker-2.18.0-SNAPSHOT.jar> --kms_key_name=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test --dataflow_kms_key=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test
>>>   test options: --nocapture --processes=8 --process-timeout=4500 --attr=ValidatesRunner,!sickbay-streaming
running nosetests
running egg_info
writing apache_beam.egg-info/PKG-INFO
writing dependency_links to apache_beam.egg-info/dependency_links.txt
writing entry points to apache_beam.egg-info/entry_points.txt
writing requirements to apache_beam.egg-info/requires.txt
writing top-level names to apache_beam.egg-info/top_level.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/setuptools/dist.py>:475: UserWarning: Normalizing '2.18.0.dev' to '2.18.0.dev0'
  normalized_version,
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/datastore/v1/datastoreio.py>:59: UserWarning: Datastore IO will support Python 3 after replacing googledatastore by google-cloud-datastore, see: BEAM-4543.
  warnings.warn('Datastore IO will support Python 3 after replacing '
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/apache_beam/io/vcfio.py>:47: UserWarning: VCF IO will support Python 3 after migration to Nucleus, see: BEAM-5628.
  warnings.warn("VCF IO will support Python 3 after migration to Nucleus, "
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/apache_beam/transforms/trigger_test.py>:509: YAMLLoadWarning: calling yaml.load_all() without Loader=... is deprecated, as the default Loader is unsafe. Please read https://msg.pyyaml.org/load for full details.
  for spec in yaml.load_all(open(transcript_filename)):
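For reference, the --attr=ValidatesRunner,!sickbay-streaming filter in the test options echoed above is nose's attrib selector: it keeps tests tagged ValidatesRunner and drops those also tagged sickbay-streaming. A small hypothetical example of how such tags are attached (class and test names are placeholders):

# Hypothetical example of nose attrib tags that the filter above matches against.
import unittest
from nose.plugins.attrib import attr


class ExampleValidatesRunnerTest(unittest.TestCase):

  @attr('ValidatesRunner')
  def test_selected_by_the_filter(self):
    # Kept: tagged ValidatesRunner and not sickbay-streaming.
    self.assertTrue(True)

  @attr('ValidatesRunner', 'sickbay-streaming')
  def test_excluded_by_the_filter(self):
    # Dropped: the "!sickbay-streaming" clause excludes this tag.
    self.assertTrue(True)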

> Task :sdks:python:test-suites:dataflow:py2:validatesRunnerStreamingTests
test_impulse (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_dofn_lifecycle (apache_beam.transforms.dofn_lifecycle_test.DoFnLifecycleTest) ... ok
test_undeclared_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_par_do_with_multiple_outputs_and_using_return (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_yield (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_flatten_multiple_pcollections_having_multiple_consumers (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_multiple_empty_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_as_dict_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_and_as_dict_side_inputs (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_flattened_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_with_different_defaults (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_default_value_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_empty_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_without_unique_labels (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_iterable_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok

----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 17 tests in 1325.352s

OK
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_23-9936418597564591628?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_12_41-12972004213030210039?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_20_24-9928319785508760034?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_31-12712044620895618834?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_13_27-7809876157764011041?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_23-3974944927100754492?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_12_43-455546527274489359?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_24-14071072793170925040?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_13_38-18006204521834117474?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_24-16017473037200560475?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_13_41-14528528840935832614?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_25-9069096702696000650?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_13_34-10881726004854093007?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_24-17260952895134991129?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_13_38-9553372520390173522?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_05_25-883107492526331014?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_13_19-5124687898521274666?project=apache-beam-testing

> Task :sdks:python:test-suites:dataflow:py35:validatesRunnerStreamingTests
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_00-7356355479494592549?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_16_38-1405733891164098509?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_24_36-5001697231129627793?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_52-4161068534476775737?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_27-17340689613530005528?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_54-2167937008689635401?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_35-17186740081622359122?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_54-14180677938363158621?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_49-3633813494619465067?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_54-13163990990752528206?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_16_55-12461455694327863848?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_54-2754105520134809049?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_09-15805718140918313228?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_53-10019684080004555403?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_20-13382061813391765678?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_56-14384301025689105347?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_32-16253314869088664945?project=apache-beam-testing
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_multiple_empty_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_return (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_yield (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_dofn_lifecycle (apache_beam.transforms.dofn_lifecycle_test.DoFnLifecycleTest) ... ok
test_undeclared_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_flatten_multiple_pcollections_having_multiple_consumers (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_impulse (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_as_dict_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_and_as_dict_side_inputs (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_without_unique_labels (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_empty_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_default_value_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_flattened_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_with_different_defaults (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_iterable_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok

----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df-py35.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 17 tests in 1422.215s

OK

> Task :sdks:python:test-suites:dataflow:py36:validatesRunnerStreamingTests
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_07-17754697858354334579?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_16_46-5128104251999246720?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_25_10-8011233984079472278?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_08_59-723257029908667211?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_16_54-6286798348118751372?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_00-18336621129848792539?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_21-17635874045355636237?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_00-16868429479091093598?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_27-4273083954838071844?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_00-10391306198346595690?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_15-10290186667460944208?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_00-5391340687117549832?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_17-12552910668985388750?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_01-777888947550719395?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_20-13007252895614025092?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_09_01-10992020524437087448?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-10-25_13_17_10-18188283979228118238?project=apache-beam-testing
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_dofn_lifecycle (apache_beam.transforms.dofn_lifecycle_test.DoFnLifecycleTest) ... ok
test_undeclared_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_return (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_multiple_empty_outputs (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_par_do_with_multiple_outputs_and_using_yield (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_flatten_multiple_pcollections_having_multiple_consumers (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_impulse (apache_beam.transforms.ptransform_test.PTransformTest) ... ok
test_as_dict_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_and_as_dict_side_inputs (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_empty_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_list_twice (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_without_unique_labels (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_as_singleton_with_different_defaults (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_flattened_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_default_value_singleton_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok
test_iterable_side_input (apache_beam.transforms.sideinputs_test.SideInputsTest) ... ok

----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 17 tests in 1424.618s

OK

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py37:installGcpTest'.
> Process 'command 'sh'' finished with non-zero exit value 2

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 50m 25s
72 actionable tasks: 57 executed, 15 from cache

Publishing build scan...
https://gradle.com/s/hh47zjkm7udos

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org