Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/09/17 01:09:20 UTC

Build failed in Jenkins: beam_PostCommit_Python36 #492

See <https://builds.apache.org/job/beam_PostCommit_Python36/492/display/redirect>

------------------------------------------
[...truncated 293.09 KB...]
            "value": false
          }
        ],
        "format": "bigquery",
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": []
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": []
                    }
                  ],
                  "is_pair_like": true
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "read.out"
          }
        ],
        "user_name": "read"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s2",
      "properties": {
        "create_disposition": "CREATE_IF_NEEDED",
        "dataset": "python_query_to_table_15686793514785",
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "RowAsDictJsonCoder$eNprYE5OLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLaqML8nPzynmCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwBK5xfp",
              "component_encodings": []
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "bigquery",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s1"
        },
        "schema": "{\"fields\": [{\"name\": \"fruit\", \"type\": \"STRING\", \"mode\": \"NULLABLE\"}]}",
        "table": "output_table",
        "user_name": "write/WriteToBigQuery/NativeWrite",
        "write_disposition": "WRITE_EMPTY"
      }
    }
  ],
  "type": "JOB_TYPE_BATCH"
}
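
The job graph above describes a simple query-to-table pipeline: a BigQuery query read (step s1) fused into a BigQuery write (step s2). A minimal sketch of that shape, using the recommended WriteToBigQuery transform rather than the deprecated BigQuerySink that the test itself still uses, and with a placeholder temp bucket, would look roughly like:

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions([
        '--runner=DataflowRunner',
        '--project=apache-beam-testing',
        '--region=us-central1',
        '--temp_location=gs://some-temp-bucket/tmp',  # placeholder bucket
    ])

    with beam.Pipeline(options=options) as p:
        (p
         # Read the rows produced by a BigQuery query (step "read"/s1 above).
         | 'read' >> beam.io.Read(beam.io.BigQuerySource(
               query='SELECT fruit FROM `some_dataset.some_table`',  # placeholder query
               use_standard_sql=True))
         # Write them to the output table (step "write/WriteToBigQuery"/s2 above).
         | 'write' >> beam.io.WriteToBigQuery(
               table='output_table',
               dataset='python_query_to_table_15686793514785',
               schema='fruit:STRING',
               create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
               write_disposition=beam.io.BigQueryDisposition.WRITE_EMPTY))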
root: INFO: Create job: <Job
 createTime: '2019-09-17T00:16:04.113252Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2019-09-16_17_16_02-10508960083829822211'
 location: 'us-central1'
 name: 'beamapp-jenkins-0917001552-662893'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2019-09-17T00:16:04.113252Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2019-09-16_17_16_02-10508960083829822211]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_16_02-10508960083829822211?project=apache-beam-testing
root: INFO: Job 2019-09-16_17_16_02-10508960083829822211 is in state JOB_STATE_RUNNING
root: INFO: 2019-09-17T00:16:02.640Z: JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job 2019-09-16_17_16_02-10508960083829822211.
root: INFO: 2019-09-17T00:16:02.640Z: JOB_MESSAGE_DETAILED: Autoscaling is enabled for job 2019-09-16_17_16_02-10508960083829822211. The number of workers will be between 1 and 1000.
root: INFO: 2019-09-17T00:16:05.907Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2019-09-17T00:16:06.601Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-a.
root: INFO: 2019-09-17T00:16:08.369Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2019-09-17T00:16:08.409Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
root: INFO: 2019-09-17T00:16:08.440Z: JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2019-09-17T00:16:08.481Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
root: INFO: 2019-09-17T00:16:08.679Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2019-09-17T00:16:08.715Z: JOB_MESSAGE_DETAILED: Fusing consumer write/WriteToBigQuery/NativeWrite into read
root: INFO: 2019-09-17T00:16:08.753Z: JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
root: INFO: 2019-09-17T00:16:08.787Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
root: INFO: 2019-09-17T00:16:08.824Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
root: INFO: 2019-09-17T00:16:08.856Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-09-17T00:16:08.979Z: JOB_MESSAGE_DEBUG: Executing wait step start3
root: INFO: 2019-09-17T00:16:09.052Z: JOB_MESSAGE_BASIC: Executing operation read+write/WriteToBigQuery/NativeWrite
root: INFO: 2019-09-17T00:16:09.096Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
root: INFO: 2019-09-17T00:16:09.132Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-a...
root: INFO: 2019-09-17T00:16:11.426Z: JOB_MESSAGE_BASIC: BigQuery query issued as job: "dataflow_job_13580043246037626969". You can check its status with the bq tool: "bq show -j --project_id=apache-beam-testing dataflow_job_13580043246037626969".
root: INFO: 2019-09-17T00:16:41.795Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on the rate of progress in the currently running step(s).
root: INFO: 2019-09-17T00:17:13.258Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
root: INFO: 2019-09-17T00:17:13.283Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
root: INFO: 2019-09-17T00:18:24.170Z: JOB_MESSAGE_BASIC: BigQuery query completed, job : "dataflow_job_13580043246037626969"
root: INFO: 2019-09-17T00:18:24.674Z: JOB_MESSAGE_BASIC: BigQuery export job "dataflow_job_17323108154119232770" started. You can check its status with the bq tool: "bq show -j --project_id=apache-beam-testing dataflow_job_17323108154119232770".
root: INFO: 2019-09-17T00:18:55.030Z: JOB_MESSAGE_DETAILED: BigQuery export job progress: "dataflow_job_17323108154119232770" observed total of 1 exported files thus far.
root: INFO: 2019-09-17T00:18:55.059Z: JOB_MESSAGE_BASIC: BigQuery export job finished: "dataflow_job_17323108154119232770"
root: INFO: 2019-09-17T00:21:37.313Z: JOB_MESSAGE_BASIC: Executing BigQuery import job "dataflow_job_13580043246037630443". You can check its status with the bq tool: "bq show -j --project_id=apache-beam-testing dataflow_job_13580043246037630443".
root: INFO: 2019-09-17T00:21:48.138Z: JOB_MESSAGE_BASIC: BigQuery import job "dataflow_job_13580043246037630443" done.
root: INFO: 2019-09-17T00:21:48.871Z: JOB_MESSAGE_BASIC: Finished operation read+write/WriteToBigQuery/NativeWrite
root: INFO: 2019-09-17T00:21:48.978Z: JOB_MESSAGE_DEBUG: Executing success step success1
root: INFO: 2019-09-17T00:21:49.100Z: JOB_MESSAGE_DETAILED: Cleaning up.
root: INFO: 2019-09-17T00:21:49.411Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
root: INFO: 2019-09-17T00:21:49.435Z: JOB_MESSAGE_BASIC: Stopping worker pool...
root: INFO: 2019-09-17T00:25:39.137Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
root: INFO: 2019-09-17T00:25:39.182Z: JOB_MESSAGE_BASIC: Worker pool stopped.
root: INFO: 2019-09-17T00:25:39.223Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
root: INFO: Job 2019-09-16_17_16_02-10508960083829822211 is in state JOB_STATE_DONE
root: INFO: Attempting to perform query SELECT fruit from `python_query_to_table_15686793514785.output_table`; to BQ
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs HTTP/1.1" 200 None
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/8494fdf8-bb53-481d-a395-2057d4afa332?maxResults=0&timeoutMs=10000&location=US HTTP/1.1" 200 None
google.api_core.retry: DEBUG: Retrying due to , sleeping 0.6s ...
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/8494fdf8-bb53-481d-a395-2057d4afa332?maxResults=0&timeoutMs=10000&location=US HTTP/1.1" 200 None
google.api_core.retry: DEBUG: Retrying due to , sleeping 3.4s ...
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/8494fdf8-bb53-481d-a395-2057d4afa332?maxResults=0&timeoutMs=10000&location=US HTTP/1.1" 200 None
google.api_core.retry: DEBUG: Retrying due to , sleeping 0.1s ...
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/8494fdf8-bb53-481d-a395-2057d4afa332?maxResults=0&timeoutMs=10000&location=US HTTP/1.1" 200 None
google.api_core.retry: DEBUG: Retrying due to , sleeping 8.7s ...
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/8494fdf8-bb53-481d-a395-2057d4afa332?maxResults=0&timeoutMs=10000&location=US HTTP/1.1" 200 None
--------------------- >> end captured logging << ---------------------
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_43-4448355501589393635?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
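The deprecation above concerns reaching back through <pipeline>.options on an already-constructed Pipeline; view_as() itself is the normal way to read typed option views when you keep a reference to the PipelineOptions object. A small illustration (the flag values here are made up):

    from apache_beam.options.pipeline_options import (
        PipelineOptions, DebugOptions, GoogleCloudOptions)

    # Keep a handle on the options object instead of reaching back
    # through <pipeline>.options later.
    options = PipelineOptions([
        '--project=apache-beam-testing',
        '--temp_location=gs://some-temp-bucket/tmp',  # placeholder bucket
        '--experiments=use_beam_bq_sink',             # example experiment flag
    ])

    experiments = options.view_as(DebugOptions).experiments or []
    temp_location = options.view_as(GoogleCloudOptions).temp_location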
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_20_29-5780207762881590142?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_30_16-16170237700439249740?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_49_27-559991115347331331?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_58_44-12266230163656671649?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_43-7935901414812933760?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:696: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_30_43-5550837763545844754?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_41_37-1145334246709632111?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_52_12-2179136832100912093?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_42-8292220058444789754?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_20_45-5170533144243815876?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_30_51-144124697147316177?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_39-4754013314882191723?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_26_19-4809877208603741641?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_38_33-13278495326597363760?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_49_57-1809862730340949694?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:793: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:232: FutureWarning: MatchAll is experimental.
  | 'GetPath' >> beam.Map(lambda metadata: metadata.path))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:243: FutureWarning: MatchAll is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:243: FutureWarning: ReadMatches is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
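For context on the experimental fileio transforms flagged above, they are typically composed as below; the file pattern and the hash stand-in are placeholders, not the test's actual compute_hash:

    import apache_beam as beam
    from apache_beam.io import fileio

    with beam.Pipeline() as p:
        matches = (p
                   | beam.Create(['gs://some-bucket/output*'])  # placeholder pattern
                   | fileio.MatchAll())
        # MatchAll emits FileMetadata records, which carry the matched path.
        paths = matches | 'GetPath' >> beam.Map(lambda metadata: metadata.path)
        # ReadMatches turns metadata into readable files.
        contents = (matches
                    | fileio.ReadMatches()
                    | 'Checksums' >> beam.Map(lambda f: hash(f.read_utf8())))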
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_38-18047342197036212085?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_15_05-11592596305628108035?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_24_38-16680244348749930310?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_36_07-1954043678713500194?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_46_59-11142786497264364832?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_56_58-15633919976141295013?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_38-18271516153335019112?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:696: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_14_27-15636791379845563612?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_25_36-13627301214111941274?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_38_32-8658819626547148089?project=apache-beam-testing
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_48_11-7138981744685782824?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_test.py>:577: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_45-5868143654202744283?project=apache-beam-testing
  streaming = self.test_pipeline.options.view_as(StandardOptions).streaming
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_16_02-10508960083829822211?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_27_04-15252453329361212264?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_37_18-17828931726892861604?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:696: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_46_52-2718219057488798230?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/big_query_query_to_table_pipeline.py>:73: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_56_21-17521011865131162664?project=apache-beam-testing
  kms_key=kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_04_39-13237318276121454825?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_16_16-11177833184087310911?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:793: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_29_03-1458796424610435007?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_17_40_29-5769408233565076792?project=apache-beam-testing

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 45 tests in 3891.850s

FAILED (SKIP=6, errors=2)

> Task :sdks:python:test-suites:dataflow:py36:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/dataflow/py36/build.gradle'> line: 56

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 5m 42s
64 actionable tasks: 47 executed, 17 from cache

Publishing build scan...
https://gradle.com/s/rn43kbm5jtklq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PostCommit_Python36 #494

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python36/494/display/redirect?page=changes>




Build failed in Jenkins: beam_PostCommit_Python36 #493

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python36/493/display/redirect?page=changes>

Changes:

[hannahjiang] BEAM-8165 pass root as a param and keep default root

------------------------------------------
[...truncated 188.83 KB...]
root: INFO: Read from given query (SELECT total_score FROM `apache-beam-testing.leader_board_it_dataset1568683319104.leader_board_users` WHERE total_score=5000 LIMIT 1), total rows 1
root: INFO: Generate checksum: de00231fe6730b972c0ff60a99988438911cda53
root: INFO: Attempting to perform query SELECT total_score FROM `apache-beam-testing.leader_board_it_dataset1568683319104.leader_board_teams` WHERE total_score=5000 LIMIT 1 to BQ
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs HTTP/1.1" 200 None
root: WARNING: Retry with exponential backoff: waiting for 2.5282757417406287 seconds before retrying _query_with_retry because we caught exception: google.api_core.exceptions.NotFound: 404 Not found: Table apache-beam-testing:leader_board_it_dataset1568683319104.leader_board_teams was not found in location US
 Traceback for above exception (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/utils/retry.py",> line 206, in wrapper
    return fun(*args, **kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py",> line 102, in _query_with_retry
    rows = query_job.result(timeout=60)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 2877, in result
    super(QueryJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 733, in result
    return super(_AsyncJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/api_core/future/polling.py",> line 127, in result
    raise self._exception
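
The retries above follow a simple pattern: re-issue the verification query with jittered exponential backoff until the output table exists. A self-contained sketch of that idea (the retry budget and delays are illustrative, not the matcher's actual configuration):

    import random
    import time

    from google.api_core import exceptions as gcp_exceptions
    from google.cloud import bigquery

    def query_with_retry(sql, project='apache-beam-testing', num_retries=4):
        """Run a BigQuery query, backing off while the table is not there yet."""
        client = bigquery.Client(project=project)
        delay = 2.0
        for attempt in range(num_retries + 1):
            try:
                return list(client.query(sql).result(timeout=60))
            except gcp_exceptions.NotFound:
                if attempt == num_retries:
                    raise
                # Jittered exponential backoff, like the waits logged above.
                time.sleep(delay * (1.0 + random.random()))
                delay *= 2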

root: INFO: Attempting to perform query SELECT total_score FROM `apache-beam-testing.leader_board_it_dataset1568683319104.leader_board_teams` WHERE total_score=5000 LIMIT 1 to BQ
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs HTTP/1.1" 200 None
root: WARNING: Retry with exponential backoff: waiting for 9.726343502925776 seconds before retrying _query_with_retry because we caught exception: google.api_core.exceptions.NotFound: 404 Not found: Table apache-beam-testing:leader_board_it_dataset1568683319104.leader_board_teams was not found in location US
 Traceback for above exception (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/utils/retry.py",> line 206, in wrapper
    return fun(*args, **kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py",> line 102, in _query_with_retry
    rows = query_job.result(timeout=60)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 2877, in result
    super(QueryJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 733, in result
    return super(_AsyncJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/api_core/future/polling.py",> line 127, in result
    raise self._exception

root: INFO: Attempting to perform query SELECT total_score FROM `apache-beam-testing.leader_board_it_dataset1568683319104.leader_board_teams` WHERE total_score=5000 LIMIT 1 to BQ
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs HTTP/1.1" 200 None
root: WARNING: Retry with exponential backoff: waiting for 11.485569382321783 seconds before retrying _query_with_retry because we caught exception: google.api_core.exceptions.NotFound: 404 Not found: Table apache-beam-testing:leader_board_it_dataset1568683319104.leader_board_teams was not found in location US
 Traceback for above exception (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/utils/retry.py",> line 206, in wrapper
    return fun(*args, **kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py",> line 102, in _query_with_retry
    rows = query_job.result(timeout=60)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 2877, in result
    super(QueryJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 733, in result
    return super(_AsyncJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/api_core/future/polling.py",> line 127, in result
    raise self._exception

root: INFO: Attempting to perform query SELECT total_score FROM `apache-beam-testing.leader_board_it_dataset1568683319104.leader_board_teams` WHERE total_score=5000 LIMIT 1 to BQ
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs HTTP/1.1" 200 None
root: WARNING: Retry with exponential backoff: waiting for 23.021208031971447 seconds before retrying _query_with_retry because we caught exception: google.api_core.exceptions.NotFound: 404 Not found: Table apache-beam-testing:leader_board_it_dataset1568683319104.leader_board_teams was not found in location US
 Traceback for above exception (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/utils/retry.py",> line 206, in wrapper
    return fun(*args, **kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py",> line 102, in _query_with_retry
    rows = query_job.result(timeout=60)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 2877, in result
    super(QueryJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 733, in result
    return super(_AsyncJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/api_core/future/polling.py",> line 127, in result
    raise self._exception

root: INFO: Attempting to perform query SELECT total_score FROM `apache-beam-testing.leader_board_it_dataset1568683319104.leader_board_teams` WHERE total_score=5000 LIMIT 1 to BQ
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs HTTP/1.1" 200 None
root: WARNING: Retry with exponential backoff: waiting for 42.31798882602364 seconds before retrying _query_with_retry because we caught exception: google.api_core.exceptions.NotFound: 404 Not found: Table apache-beam-testing:leader_board_it_dataset1568683319104.leader_board_teams was not found in location US
 Traceback for above exception (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/utils/retry.py",> line 206, in wrapper
    return fun(*args, **kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py",> line 102, in _query_with_retry
    rows = query_job.result(timeout=60)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 2877, in result
    super(QueryJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/cloud/bigquery/job.py",> line 733, in result
    return super(_AsyncJob, self).result(timeout=timeout)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/-1734967053/lib/python3.6/site-packages/google/api_core/future/polling.py",> line 127, in result
    raise self._exception

root: INFO: Attempting to perform query SELECT total_score FROM `apache-beam-testing.leader_board_it_dataset1568683319104.leader_board_teams` WHERE total_score=5000 LIMIT 1 to BQ
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs HTTP/1.1" 200 None
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "DELETE /bigquery/v2/projects/apache-beam-testing/datasets/leader_board_it_dataset1568683319104?deleteContents=true HTTP/1.1" 204 0
--------------------- >> end captured logging << ---------------------
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_11-11665898112606205467?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_38_59-8773499410718174647?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_48_39-5786686498867169991?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_57_57-17382974224420650058?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_09_01-7826586305481796715?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_08-8148531670326089751?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:696: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_49_30-1545672843272053544?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_00_15-1564555856548847088?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_10_07-12390285854667830608?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_11-16102909176300173982?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_37_18-10558352859771283416?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_47_44-5971316851121440948?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_06_35-628383221623054414?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_08-4151082446639164191?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:793: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_34_15-9511418161016058334?project=apache-beam-testing
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_46_24-2707349394095973067?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_56_26-12229850510810866003?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_08-1903361726213402625?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_45_49-3799217730240123789?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_56_18-9278152417967469376?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_06_27-5879717249118003220?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:793: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:232: FutureWarning: MatchAll is experimental.
  | 'GetPath' >> beam.Map(lambda metadata: metadata.path))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:243: FutureWarning: MatchAll is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:243: FutureWarning: ReadMatches is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_07-17636302065221186426?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:696: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_32_37-15343564949765432618?project=apache-beam-testing
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_41_23-10249637069132736129?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_52_11-17901287474390103065?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_02_50-17741434623810597808?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_14_08-3832043688850206157?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_13-6499978359723869845?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_32_00-16148709513167406456?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_42_51-4623470147623327568?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_53_55-14378665674437208716?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_03_00-1967939982189922129?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_test.py>:577: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  streaming = self.test_pipeline.options.view_as(StandardOptions).streaming
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1145: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_22_08-6703107425144788674?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_34_11-14441787403463721091?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_44_22-17294988434828944833?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_18_54_32-10070590817092297166?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:696: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-09-16_19_04_41-6960995567086625432?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/big_query_query_to_table_pipeline.py>:73: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=kms_key))

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 45 tests in 3718.809s

FAILED (SKIP=6, errors=1)

> Task :sdks:python:test-suites:dataflow:py36:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/dataflow/py36/build.gradle'> line: 56

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 2m 59s
64 actionable tasks: 47 executed, 17 from cache

Publishing build scan...
https://gradle.com/s/bbd4xgpdeue3u

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
