Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2021/07/23 01:25:53 UTC

Build failed in Jenkins: beam_PostCommit_Python38 #1465

See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1465/display/redirect?page=changes>

Changes:

[Kyle Weaver] [BEAM-12625] Annotate

[noreply] [BEAM-12643] Resolved the concurrent writes issue for parallel tests

[noreply] [BEAM-10212] Add caching state client wrapper (#15170)


------------------------------------------
[...truncated 23.87 MB...]
            "value": "add_attribute"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "add_attribute.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s2"
        },
        "serialized_fn": "ref_AppliedPTransform_add_attribute_5",
        "user_name": "add_attribute"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s4",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "message_to_proto_str"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "WriteToPubSub/ToProtobuf.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s3"
        },
        "serialized_fn": "ref_AppliedPTransform_WriteToPubSub-ToProtobuf_7",
        "user_name": "WriteToPubSub/ToProtobuf"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s5",
      "properties": {
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "kind:bytes"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "pubsub",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s4"
        },
        "pubsub_id_label": "id",
        "pubsub_serialized_attributes_fn": "",
        "pubsub_timestamp_label": "timestamp",
        "pubsub_topic": "projects/apache-beam-testing/topics/psit_topic_outputc6a52c15-7f83-4674-905b-22ba86a509a3",
        "user_name": "WriteToPubSub/Write/NativeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
--------------------- >> end captured logging << ---------------------
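
For orientation, the truncated job graph above describes a streaming job whose final steps are an "add_attribute" ParDo, a "WriteToPubSub/ToProtobuf" map (message_to_proto_str), and a native Pub/Sub write with id and timestamp attribute labels. Below is a minimal Beam Python sketch of a pipeline with roughly that shape; it is reconstructed only from the step names and Pub/Sub options in the JSON, the input source and the body of add_attribute are assumptions, and it is not the actual pubsub integration test code.

# Illustrative sketch only, reconstructed from the job-graph step names above.
import apache_beam as beam
from apache_beam.io.gcp.pubsub import PubsubMessage
from apache_beam.options.pipeline_options import PipelineOptions

def add_attribute(message):
    # Hypothetical body for the "add_attribute" ParDo; the real test's
    # implementation is not shown in the truncated log.
    attrs = dict(message.attributes)
    attrs['processed'] = 'true'
    return PubsubMessage(message.data, attrs)

def run():
    options = PipelineOptions(streaming=True)
    with beam.Pipeline(options=options) as p:
        _ = (
            p
            | 'Create' >> beam.Create([PubsubMessage(b'payload', {})])  # stands in for the real source
            | 'add_attribute' >> beam.Map(add_attribute)
            # WriteToPubSub expands into the "ToProtobuf" map and the native
            # "Write" step seen as s4/s5 above; id_label and timestamp_attribute
            # mirror pubsub_id_label / pubsub_timestamp_label in the JSON.
            | 'WriteToPubSub' >> beam.io.WriteToPubSub(
                'projects/apache-beam-testing/topics/'
                'psit_topic_outputc6a52c15-7f83-4674-905b-22ba86a509a3',
                with_attributes=True,
                id_label='id',
                timestamp_attribute='timestamp'))

if __name__ == '__main__':
    run()
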
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_05_00-17568754818138381812?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_19_20-12387727154330768512?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_46_44-5248962415258411364?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_55_39-4943236387291924541?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_03_57-10350554309621953977?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_04_27-4872838096293966044?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_23_27-6716091158996276170?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_33_01-3204202693674129630?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_41_43-5532485479966459731?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_50_14-840026504670175564?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_58_52-164340920566717596?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_07_42-2962908502847090832?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_16_10-403113871550841513?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_04_57-11966251605248828670?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_14_07-4607067867790958106?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_26_02-12780420530795182169?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_34_41-2523730719379604753?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_43_12-5320103243668571407?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_52_24-12314575651542958968?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_00_43-13103179999567324565?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_09_30-17573061966366547806?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_07_02-5535267738999211884?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_19_04-4790537808618397926?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_27_52-8405863592802645085?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_36_22-14146784864298885342?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_44_57-4337176903637281725?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_54_13-10516140559411688253?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_02_41-3799353457623507956?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_04_23-5204310370225083284?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_23_03-7723529950081689171?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_31_30-1740570413956004351?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_39_55-3153016247527287419?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_48_59-12452086908062869092?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_58_21-7347607350132359198?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_07_30-5799830198369998963?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_16_57-3055874307980347673?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_04_25-1814702768496228593?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_13_47-5888919896422017307?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_24_17-5051394784800150070?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_34_43-18266745080924282263?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_45_44-1647649960773901801?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_54_50-10133103280363411306?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_03_53-7682630778608697820?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_04_29-10284008362430525520?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_14_12-14347154662905936191?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_23_15-17196663008852391807?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_32_22-7808091209336042679?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_41_44-8486629199671037708?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_50_46-1368577021520161001?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_59_33-11913494540067981161?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_08_34-10144975286951716471?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_04_24-16189709903486854306?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_14_17-5083473466316392370?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_22_45-12006382598099008006?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_31_28-3437288011358878883?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_40_03-13153489057805649837?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_48_07-3740849493610366039?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_17_56_36-17582064230304697034?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_04_59-10999094247909594955?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_18_14_53-9117103510018230435?project=apache-beam-testing

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 4919.776s

FAILED (SKIP=8, errors=7)

> Task :sdks:python:test-suites:dataflow:py38:postCommitIT FAILED

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 126

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 25m 24s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/pwgxdiiqkzygu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PostCommit_Python38 #1480

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1480/display/redirect?page=changes>




Build failed in Jenkins: beam_PostCommit_Python38 #1479

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1479/display/redirect>

Changes:


------------------------------------------
[...truncated 23.21 MB...]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:54.053Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/Impulse/Read+write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables+write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:54.123Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:54.262Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:54.332Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow+write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+write/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:55.575Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow+write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+write/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:55.635Z: JOB_MESSAGE_DEBUG: Executing success step success48
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:55.723Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:55.766Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:55.789Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:58.045Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:32:58.064Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:54.727Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:54.807Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:54.877Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:54.947Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:59.019Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:59.093Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:59.177Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:59.236Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:34:59.260Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:35:12.287Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:35:12.325Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:35:12.351Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-26_06_26_52-2694798485056318199 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16273059986599.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 1083
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/09282004-eb8f-4454-b8a3-61d7102d298d?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/09282004-eb8f-4454-b8a3-61d7102d298d?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16273059986599 in project apache-beam-testing
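
As context for the bigquery_matcher check logged just above (issue a verification query, then compare the returned rows), here is a minimal sketch assuming the google-cloud-bigquery client library. The rows_match helper and the commented usage are illustrative only and are not Beam's actual apache_beam.io.gcp.tests.bigquery_matcher implementation.

# Minimal sketch of a post-commit BigQuery result check; assumes google-cloud-bigquery.
from google.cloud import bigquery

def rows_match(project, query, expected_rows):
    """Run `query` and compare its result set to `expected_rows`, ignoring row order."""
    client = bigquery.Client(project=project)
    actual = {tuple(row.values()) for row in client.query(query).result()}
    return actual == set(expected_rows)

# Usage mirroring the log lines above (expected rows left as a placeholder):
# rows_match(
#     'apache-beam-testing',
#     'SELECT bytes, date, time '
#     'FROM python_write_to_table_16273059986599.python_no_schema_table',
#     expected_rows=[...])  # the four tuples printed after "Result of query is:"
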
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:12.957Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:13.165Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:13.230Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:13.281Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:13.347Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:12.962Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:13.018Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:13.059Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:18.528Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:18.586Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:18.653Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:18.703Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:37:18.746Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-26_06_28_56-4845701432454546098 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:39:29.525Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:39:29.573Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T13:39:29.618Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-26_06_31_43-11802104475142766257 is in state JOB_STATE_DONE
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_05_02-15632305428358930056?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_20_35-1746795105655621385?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_29_50-13675682618227941265?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_39_39-6407084805042816161?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_49_02-7854115354434027636?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_58_00-11171683477824050623?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_07_37-8765107099445147177?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_17_05-13860397564235008419?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_26_52-2694798485056318199?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_04_59-14316213318223725743?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_24_50-9493163227893937615?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_33_28-1781049347622528613?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_41_38-238046821529261414?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_50_06-17630435608922407148?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_59_23-3046366725541602670?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_08_42-2641353601068168552?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_18_07-13186645045640984044?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_28_56-4845701432454546098?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_05_00-10253532134743354695?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_18_05-5003769984148369909?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_30_19-12044224695303550614?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_39_34-3778949960106619383?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_48_45-712202855686562391?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_58_12-10855001813605768849?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_06_42-13602531407484444398?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_15_11-4389490557652929624?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_04_56-17147661088170638781?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_23_52-9035099859692393340?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_32_49-14326332075822166563?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_42_12-4023756551327610816?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_50_52-389106102336415505?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_59_25-9800220069442689188?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_07_44-11780349876867128854?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_16_02-14695276621215358111?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_06_58-16267014976553331004?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_16_44-10478792964728611100?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_27_29-12128234209127696466?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_37_33-12422507794031566047?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_46_48-1375184086017709125?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_55_49-8259886941900249133?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_04_38-5690399171288781520?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_14_09-16957359384146219665?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_22_58-2591251823333747014?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_04_57-15744553454169789437?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_13_47-1009547201048250493?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_23_09-7539445464876441197?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_31_42-4798835486914071579?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_41_12-17136550350012862690?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_50_16-16967226157306794958?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_58_40-12888386353189263313?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_07_49-17359628822857128226?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_16_16-7085584371214236838?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_05_00-3128482695610356120?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_14_10-5903353216993402809?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_28_48-15114627125820615878?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_54_44-17783515370927888087?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_13_23-9967011286697384648?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_04_55-10483680184326743307?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_14_41-3484158336048633884?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_25_10-4101782929048458661?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_35_31-6225238109930601661?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_46_48-4359586931601420183?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_05_55_27-11078560468218514930?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_04_48-8361971999356262890?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_14_02-9725265397174713868?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_22_33-290604763394405507?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_06_31_43-11802104475142766257?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5710.586s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 39m 9s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/ovvxdzu2h4zdm

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_Python38 #1478

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1478/display/redirect>

Changes:


------------------------------------------
[...truncated 23.17 MB...]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:23.596Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-a...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:23.916Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:23.986Z: JOB_MESSAGE_DEBUG: Value "assert_that/Group/_CoGBKImpl/GroupByKey/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:24.037Z: JOB_MESSAGE_BASIC: Executing operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:24.059Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:43.446Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:43.483Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:33:56.541Z: JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric descriptors, so new user metrics of the form custom.googleapis.com/* will not be created. However, all user metrics are also available in the metric dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics, you can delete old / unused metric descriptors. See https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
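
The Dataflow message above notes the 100-descriptor limit on custom metrics and suggests deleting old or unused metric descriptors. A minimal sketch of that cleanup, assuming the google-cloud-monitoring (monitoring_v3) client library, follows; it is illustrative only (deletion is destructive, so real cleanup should first filter by age or usage) and is not part of the Beam test suite.

# Illustrative sketch; assumes google-cloud-monitoring (monitoring_v3).
from google.cloud import monitoring_v3

def delete_custom_descriptors(project_id, prefix='custom.googleapis.com/'):
    # Lists the project's metric descriptors and deletes the custom ones.
    # Add filtering for "old / unused" descriptors before running for real.
    client = monitoring_v3.MetricServiceClient()
    parent = f'projects/{project_id}'
    for descriptor in client.list_metric_descriptors(request={'name': parent}):
        if descriptor.type.startswith(prefix):
            client.delete_metric_descriptor(request={'name': descriptor.name})
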
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:34:10.691Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on the rate of progress in the currently running stage(s).
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:34:34.356Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:34:34.383Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:43.345Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:43.421Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:43.480Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:43.541Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:46.908Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:46.992Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:47.074Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:47.116Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:47.159Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:55.136Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:55.175Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:35:55.216Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-26_00_26_33-8273221954222678022 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_1627284379962.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/64a66598-2f48-4bcb-81b0-ecd439d49f33?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/64a66598-2f48-4bcb-81b0-ecd439d49f33?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_1627284379962 in project apache-beam-testing
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:37:58.537Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:37:58.622Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:37:58.658Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-26_00_30_01-6161043334872327620 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:49.463Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:49.656Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:49.718Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:49.763Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:49.831Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:54.786Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:54.850Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:54.909Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:54.958Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:38:54.992Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:41:16.884Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:41:16.940Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T07:41:16.967Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-26_00_33_15-10629785360526088984 is in state JOB_STATE_DONE
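
The JOB_STATE_DONE transitions above are what the integration-test harness blocks on before checking results. A minimal sketch of submitting a pipeline to Dataflow and waiting for that terminal state (the options below are illustrative; the bucket is a placeholder):

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions([
        "--runner=DataflowRunner",
        "--project=apache-beam-testing",
        "--region=us-central1",
        "--temp_location=gs://some-temp-bucket/tmp",  # placeholder bucket
    ])

    with beam.Pipeline(options=options) as p:
        _ = p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * 2)
    # Leaving the "with" block calls run() and wait_until_finish(), which
    # returns once the job reaches a terminal state such as JOB_STATE_DONE.
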
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner'] (a DirectRunner usage sketch follows this test listing)
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
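
The three TestStream tests above are skipped because TestStream is only honoured by the direct runners. A minimal sketch of what such a test does when run on DirectRunner (illustrative elements only):

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions
    from apache_beam.testing.test_stream import TestStream

    # TestStream replays a scripted sequence of elements and watermark
    # advances; only DirectRunner/SwitchingDirectRunner support it.
    options = PipelineOptions(["--runner=DirectRunner", "--streaming"])
    with beam.Pipeline(options=options) as p:
        _ = (
            p
            | TestStream()
                .add_elements(["a", "b"])
                .advance_watermark_to_infinity()
            | beam.Map(print))
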

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_05_34-2362905718945460791?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_21_06-6081342131258106295?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_30_31-6813725568516923388?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_41_21-11610757825667232689?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_50_28-16731387567795481655?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_59_04-7352065576643673163?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_07_23-8742463815063397268?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_15_42-7540343705218455687?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_24_36-17465518468154535998?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_33_15-10629785360526088984?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_05_33-2187553312996463264?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_24_37-6472476851990189762?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_32_55-17590921908803372198?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_41_59-18402315751028057416?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_52_11-3917798020222816427?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_02_31-4521691309726480766?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_10_55-8345897789290369222?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_19_23-355196399564446574?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_30_01-6161043334872327620?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_05_30-5687640415916540379?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_23_13-9207614697923531217?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_31_26-8914287896279088419?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_40_30-3650414855572014682?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_49_35-16548118065559168653?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_58_33-16289043920743018884?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_07_37-3378832894891671173?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_15_51-8423169573734907037?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_24_19-17365494920098485948?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_05_34-16971718280111098390?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_18_33-317890719986372835?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_30_53-699363084694756623?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_40_01-14402687958995051915?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_49_17-9852536341065034961?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_59_18-18308235717354484637?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_08_49-55171842506957389?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_18_16-4857999494638347774?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_07_21-2628863015769262676?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_17_42-18087859970225778172?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_28_07-7342933178128160346?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_38_46-3372940283026419911?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_49_09-8608915274698488050?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_58_24-10117040535986675457?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_07_06-13182706281888096392?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_16_53-2619079681339973927?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_05_33-9163344746447030907?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_14_58-2754844491490005439?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_29_16-15840028098072863777?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_56_29-6837435597756284202?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_15_05-6612708292189325184?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_05_32-4634911533744411714?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_14_21-9631083406244887493?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_23_22-8116862796233999403?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_31_45-2545186712822491619?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_40_11-4481980440598320610?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_48_51-2857733475101811768?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_57_12-17542462394345403428?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_06_38-12488625111758611101?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_16_26-3919648197105116592?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_26_33-8273221954222678022?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_05_30-5673934091608252391?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_15_05-5534506615796182213?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_25_03-12117194442264798182?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_35_35-10347584109123702993?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_47_16-10587310143115274998?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_23_57_45-11846440910979061469?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_06_59-1495979348729692729?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-26_00_16_25-103897458289446120?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5775.605s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 40m 55s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/6gtx3fadhg52k

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1477

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1477/display/redirect>

Changes:


------------------------------------------
[...truncated 23.12 MB...]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:33:52.850Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:33:52.917Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:33:57.240Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:33:57.315Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:33:57.392Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:33:57.439Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:33:57.481Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:35:54.755Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:35:54.819Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:35:54.854Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:36:11.067Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:36:11.111Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:36:11.147Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_18_28_12-4989898018648254710 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_18_27_13-4797004842909030515 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16272628191097.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/9b13b6af-1fde-408c-b0d8-38ad47722b1a?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/9b13b6af-1fde-408c-b0d8-38ad47722b1a?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16272628191097 in project apache-beam-testing
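
The DEBUG lines above show the usual Application Default Credentials fallback on a GCE worker: no explicit or gcloud credentials are found, so the client asks the instance metadata server for a scoped access token. A minimal sketch of triggering the same flow directly (scopes copied from the request above):

    import google.auth
    from google.auth.transport.requests import Request

    # On a Compute Engine VM this resolves to metadata-server-backed
    # credentials, matching the fallback sequence in the log.
    credentials, project_id = google.auth.default(scopes=[
        "https://www.googleapis.com/auth/bigquery",
        "https://www.googleapis.com/auth/cloud-platform",
    ])
    credentials.refresh(Request())  # token fetch via metadata.google.internal
    print(project_id, credentials.token is not None)
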
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:37:56.394Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:37:56.604Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:37:56.710Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:37:56.793Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:37:56.886Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:38:00.830Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:38:00.917Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:38:01.025Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:38:01.092Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:38:01.127Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:40:19.697Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:40:19.753Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-26T01:40:19.801Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_18_32_02-4034613645589300320 is in state JOB_STATE_DONE
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_05_21-1981676945470292566?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_20_48-12395732071680940318?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_30_37-4833764860542561084?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_39_43-1193604162946371406?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_48_44-8594011653975928686?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_58_20-12325512933959160241?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_07_14-3090563292684800915?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_15_33-11485623650112613255?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_05_21-5518430946793033190?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_24_51-17779507536588399896?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_32_54-14128413384505082800?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_41_13-17805406059809627817?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_49_34-12878557808714934201?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_58_48-16876235770804733789?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_08_06-11670172854137562960?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_17_34-3254561330823532408?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_28_12-4989898018648254710?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_05_21-2757704167799076667?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_18_13-16649440018277428687?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_30_23-13523580315262583808?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_40_24-8995119750209818085?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_49_48-494600604816447915?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_59_39-2078385709636043486?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_07_53-4486094748746360514?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_16_16-9832994966855971156?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_05_15-12206000987868447362?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_23_29-10328831390239210625?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_31_37-8294540414322721427?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_40_46-17725980042092252189?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_49_50-9817427321600892431?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_58_09-14445221879650602670?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_07_02-16890792373274277892?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_15_35-2541608769435171817?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_07_37-15593664750985453925?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_17_48-16501773669138110396?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_28_30-8812696804275960041?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_37_27-5580665481390944174?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_46_45-3893513757929682616?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_56_31-8902314890643096764?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_05_19-13307906824051284514?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_14_36-11273618929155710165?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_23_10-5481144953855928562?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_05_16-14598905447901660853?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_14_48-12311097190592433968?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_23_39-17382288268039408783?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_31_56-11409552828613214354?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_40_13-13336812990378541121?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_48_47-2678970808265510832?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_57_23-17962325081475040222?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_06_49-5496935260126921504?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_17_31-14308502952895290940?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_27_13-4797004842909030515?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_05_20-3802242340434527108?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_15_17-16491055163578129302?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_25_20-8161239033063174969?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_35_45-7941924432664523749?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_47_10-10606939845534871822?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_55_39-16669537594537535064?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_04_35-14840683649053775077?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_13_48-14400878576648756815?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_22_45-5569105731149654929?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_32_02-4034613645589300320?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_05_17-13833178727560068545?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_15_03-2729828986138386724?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_30_25-18371425293756153431?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_17_55_38-17702673690507922625?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_18_14_03-13835809657940371110?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5755.857s

OK (SKIP=8)

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/direct/common.gradle>' line: 144

* What went wrong:
Execution failed for task ':sdks:python:test-suites:direct:py38:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 40m 4s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/4nvt562sndhei

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1476

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1476/display/redirect>

Changes:


------------------------------------------
[...truncated 23.22 MB...]
            "type": "STRING",
            "value": "<lambda>"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_7"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_7"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_7"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "Assert Checksums/Unkey.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s28"
        },
        "serialized_fn": "QlpoOTFBWSZTWV9dO3MAAfp/6H3/fHtIY4pFjevcgr//3+ZrxA4AEABAAr3Y3RpqcglTJNCT1PUeiNpNAYQyAAbU0NDQAaMQAA1NE9BDQmiaaekNHpGQaGmgAAAAAA0DESp+pHpA00aaZNA0MQBoaDQAAGgDQ9QJTUCjGpoE09Iag2o9J6mnqaaZNAMnqAAPUyaBppBNoDEanft2k4jucZINPPm/kGmhx0/nK8mwhIGqRATMQIpgFOG6wDpCWoshGhGq+EifMd6C6/B0UQoq68OG+L6jRDvJh4hcQElok7mlfY0EdgNZ/A9fpx6+aVilMKdTR4zxcdDmFcxMvC1xInXeBWMZFFo0e2fu1lh8o2VCEyklSBgYbU1uXIu2JoJ1BCKrHvW7Ln3CjsMa0uPCMZsfBr81wYt13DOHHyadPpf7CWGmpkPKYcpA3cGWIDgnAdumIRmhrkgBKnUqR6itbXoAm0QucksoniEBpxPmXjYMnV1tAFhAV8YoZ8l+KPnCFVvyBLNHx4EWnRLRtSJFDp3XLCYuFXpa4PN+sqZ/usC0R7zcW+5nJqFuLH8XNYBfuGUsDHZC6EHixpoYuYWSA7e3gf1m1bL226cYuyBZzAnlAqZ1hbyRS1SVa8MLUWnBfnPPoj6tFGSIw7TK7E5MJEpJRV4KYsRdFiaZBBzEgT0C0Dk0NSy1lPDM2+oPEFMUQa71yYZMX3LfiubQV7JrAfCEgnEBXM9GPGtC+F0GBcdQ8FXQS4wwe0CES2GKDFkKsm7A5nm4Z6ODARDlJGcgVExApl9+wqIOhkRK8TGxtYMEFYCu+wAtG/AwRsiILNDd5MoeAsWcwGSLvSMAfQPFJ47rcQqJku2hPMLNRwnULxpWCGCmJAIoRcCGjn0CMebRIhCCLIyL1QO8orcRmIh3maUEg960VBAKk0E8G4IsQB7y3T+MQv3F0LFi7OkWdVklkmlO5ZgT3/kIkVeUAqkkowhzEnNnrDMNW6pyGyVZTEkuXGmVyWMMr75WzG4TzhI1SgatpJLD/f4u5IpwoSC+unbm",
        "user_name": "Assert Checksums/Unkey"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s30",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "_equal"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_7"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_7"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_7"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "Assert Checksums/Match.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s29"
        },
        "serialized_fn": "QlpoOTFBWSZTWWKkpeAAAzD//n///8vWd//3P//c57////Z/9Q5BcAAAAQABARBQBF7c94d2vXe7bNXXt7boaCTRGmlNknqHpMJ6TDJlGT00nqNGEaZGIxPUGJ4poeoYjNqgaZDATBqZTEYplPSnmEaCjxCeoAHqekyYgABoAAGgAA2k02oAAaJo0IR6lP1TajwSNPU9RtJ6geo0PU0AAAAAAAAAMgAD1BppRHqnlDTRkDTQAAaaAAAAAAAAAAAAAABKmiap6ap5T8TJMk2KeoPSPU0PRqepppiegjRieoZDIAwJoNDRk0ekZMmTHUFqqrnGtjCs0ynbBZtjweFjSRBtDux6eePtG+r3pAru2Y30IvyKLNuyTz2yqCwkKguqSCQ+WpNLr4rR4ymBDUn00tAeaKza5ronAeHyTxSaAn/PaSRiYRJYtdhs36xCCnD9Q0HOMOc5FsntHk8wFofdIHYqKSPecSJ7ikYGoc9ATGKSMBgtqQ0r0oHgBRgKKQCUDCG5NXk1EKywb1CEZkwipC5ImHFzBuJY8ICcGKEQ4iXBkJOEg8hkTW5PWkjT24TRQm1CpkZiUj9wHAsLUCgxZFUMDzgrVrNfVxK/SqtcgO0Dt22taMrrPOAWYrrQ5GiNw5lcp17LDRL9FLL0+Jj8YJSw8uW3szHs2bgsqPYUY4xTqxgwinUdSOrHMHqPC/GySOM/KhTGz9gEIAUg2LOk7Hsq4ot01eBbk6ZCG/UR45MkRisWjQcKAMWl1PBJX3AwT4rqxnsmKl+UR0du5bihAss7EgYqdyuCFL8HozNMe4tWAoEgiEAkyBQA8HvsWO6uuLV21f3Kr0cmNMxA7lBgwKPxysiV1cAmAFIY52C/INrRDCj8UQAI2bAhaAaQuKMlMNPuO3f//THRT10RS4YVo6kZCDzb+JjiKR6InFo6RLo1wOYOB0gD2mJia8wT1K8cxgedVvDt1Yk1/MAiBm5r7GJrILpVxKYgNIcFhQTxco2U13IJKaSvDBxMNoO0CFDaoHZSGxP0gI3CnQHAHHsVJC4VCq65mzjlKAQ5kAeLco/CxRDAVlFssG9RwXWRTbn5CVJrRmJngyJW1QYHVkw0xCl57cF4NPYgSCYYJ3XDB/EFFXnFZ+whN/IfU1RGtOq4OUtD5kZkGgQkAolchkXUYwQn8/KhwPg6wwAMB2lDEDGmAGOUqLBYOLUSARikBFWHEYgLx0o30YcnHgKkYwQJLUVymqmXQ+EptBUj8iNyH6QQOkwDbBeKoRckQJ1y6+N3Wjs5I0BRBa/EAB6uW9wnAbl6NtQPnZQSqYEAxpQiS3Yg22vCAkDzGMEZ2ZQsmkxdBamFYYVa8+Fy4nk0eDkLtDP1suIb4MFGvFIJgQBg6ohrsC41Gq/P/r2OWe7CwFWjFupJ10PDcglcDIzZqtIzVgQLnuiKFvRZhODym4DLAkCUQDasCyRdGZ5RwwT2Q2MPcsCEHrM2cO8jDY3ruZYpYZxNKH3B+hNYMxqOg6Z9qYQeoiOQM4BTBhmBfiBUo3azZXLPazHXCshGjJfP/scarSO5Q+rWcWpBWsshHG9SpoSAzAZZSohYiAIkF5Gt/ppi+IJwyhIJRHSEUsj1QlGoxa8tkIc9Aw1dQqB0nkAEgyIcT/N+r0GOUKQMLRpzoYmJFFK+i458ZoxCRAhsFXnS8tArJuUKEHbY20pvEGMPmYx0sjRvKOflUnIYcMrXaM3QTyn4sLZiS05JVvncXUqy7A10nMK8tjIWNTJwg8W689tGi0avhObD2HjpytYmxy5olyP+LuSKcKEgxUlLwA==",
        "user_name": "Assert Checksums/Match"
      }
    }
  ],
  "type": "JOB_TYPE_BATCH"
}
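
The "Assert Checksums/Unkey" and "Assert Checksums/Match" steps in the job description above (the Match step wraps a callable named _equal) are the expanded form of Beam's pipeline-level assertion helper. A minimal, illustrative use of that helper, not the actual checksum test in this suite:

    import apache_beam as beam
    from apache_beam.testing.util import assert_that, equal_to

    with beam.Pipeline() as p:
        checksums = p | beam.Create(["expected-checksum"])  # placeholder value
        # assert_that expands into the Group/Unkey/Match steps seen in the
        # job JSON; equal_to supplies the matcher applied in the Match step.
        assert_that(checksums, equal_to(["expected-checksum"]))
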
root: DEBUG: Response returned status 503, retrying
root: DEBUG: Retrying request to url https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json after exception HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Sun, 25 Jul 2021 19:01:05 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '503', 'content-length': '122', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 503,
    "message": "The service is currently unavailable.",
    "status": "UNAVAILABLE"
  }
}
>
--------------------- >> end captured logging << ---------------------
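
The 503 above is a transient "service unavailable" answer from the Dataflow jobs.create endpoint, and the DEBUG lines show the client simply retrying the request. The snippet below is a minimal, hypothetical sketch of that retry-with-exponential-backoff pattern (HttpError here is a stand-in class, not the SDK's own exception or retry utility):

import random
import time


class HttpError(Exception):
    """Stand-in for an HTTP error that carries a status code."""

    def __init__(self, status_code, message=''):
        super().__init__(message)
        self.status_code = status_code


def call_with_retries(request_fn, max_attempts=5, base_delay=1.0):
    """Call request_fn, retrying 503 responses with exponential backoff plus jitter."""
    for attempt in range(1, max_attempts + 1):
        try:
            return request_fn()
        except HttpError as err:
            if err.status_code != 503 or attempt == max_attempts:
                raise
            time.sleep(base_delay * 2 ** (attempt - 1) + random.uniform(0, 1))

A real client would pass the jobs.create POST as request_fn; the backoff keeps the test from hammering an already overloaded endpoint.
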
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_06_22-688451763021622157?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_25_53-3212633804348056395?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_34_12-10869663304701876429?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_42_11-595250262719336669?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_50_33-5386546112866752284?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_58_54-4095731404190510478?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_07_22-187372854896504603?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_15_59-13515762378930884530?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_06_24-8656169325629568424?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_21_54-12148606031076360949?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_30_59-3666633961446309767?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_41_22-9361306116869984151?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_50_57-15670584139150911716?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_01_21-9133098570774544827?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_11_00-9452411464406563347?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_20_06-12880080220998601543?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_28_34-14602143438672383956?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_06_18-12565439381665589333?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_24_08-8317732749980698482?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_32_16-9359231528121424317?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_41_37-9009326818791977326?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_50_16-1591889569320530582?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_58_59-7748334757332248910?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_08_35-10373603813944218146?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_17_54-11955314296183953704?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_27_24-7157512768273814313?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_06_23-8180179894960330444?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_19_20-8325894802763581127?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_31_30-4988685081821057608?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_40_00-4464529635847624409?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_49_07-4497114426683583132?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_59_07-12052232070487616776?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_08_48-1403434161469693953?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_18_13-1008653373411119019?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_28_56-4500363189144627000?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_08_21-9329869993352444515?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_18_27-531251577373197510?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_29_24-4012420295450348774?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_39_11-7895665586248827007?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_49_00-7309295766964612892?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_58_46-2446063774309043949?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_07_17-15451669759493073085?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_17_13-13365963108064298086?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_06_19-3085051219285286200?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_15_07-3136548184973715099?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_24_03-17447870812082313697?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_32_23-1447131558463058186?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_41_30-5268697824765174465?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_50_03-10973038463950923882?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_58_22-13201058900364520713?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_07_35-9661123195612522744?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_16_48-1791211922850409881?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_06_22-3243083808351240945?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_15_46-3312039883903888143?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_25_34-4110237982263075344?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_36_40-3910823616804310118?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_47_45-16767131545180407157?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_56_15-10289847021630079206?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_05_03-11159355551667557398?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_13_45-16174058167844246238?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_22_20-4896762892637102119?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_06_18-2582068396757710952?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_15_29-10760658125196485713?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_30_21-2200571849335670767?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_11_55_37-16816521219896996418?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_12_14_35-9925206101186967360?project=apache-beam-testing

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5508.610s

FAILED (SKIP=8, errors=1)

> Task :sdks:python:test-suites:dataflow:py38:postCommitIT FAILED

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 126

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 36m 53s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/lwia4mx2qc7va

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1475

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1475/display/redirect>

Changes:


------------------------------------------
[...truncated 23.16 MB...]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:13.665Z: JOB_MESSAGE_DEBUG: Value "write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:13.736Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/Impulse/Read+write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables+write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:17.428Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/Impulse/Read+write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables+write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:17.481Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:17.536Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:17.590Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow+write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+write/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:18.861Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow+write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+write/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:18.915Z: JOB_MESSAGE_DEBUG: Executing success step success48
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:18.987Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:19.033Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:34:19.061Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:36.951Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:37.016Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:37.076Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:37.132Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:42.236Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:42.298Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:42.365Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:42.411Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:35:42.451Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:34.224Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:34.374Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:34.423Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:34.479Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:34.558Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:37.690Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:37.734Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:37.768Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:39.610Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:39.670Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:39.731Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:39.795Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:36:39.829Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_06_27_26-18133000312399108909 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16272196325415.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found, so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/b444901c-f0aa-4cd0-9ff5-0077d92b921f?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/b444901c-f0aa-4cd0-9ff5-0077d92b921f?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16272196325415 in project apache-beam-testing
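
The verification above issues a plain SELECT against the temporary dataset and compares the returned rows with the values the pipeline was expected to write, after which the dataset is dropped. A rough, hypothetical equivalent using the google-cloud-bigquery client (not the Beam bigquery_matcher itself):

from google.cloud import bigquery


def fetch_rows(project, query):
    """Run a query and return the result rows as plain tuples."""
    client = bigquery.Client(project=project)
    return [tuple(row.values()) for row in client.query(query).result()]


rows = fetch_rows(
    'apache-beam-testing',
    'SELECT bytes, date, time '
    'FROM python_write_to_table_16272196325415.python_no_schema_table',
)
# The matcher compares a normalized view of these rows (here just sorted)
# against the expected data written by the pipeline under test.
print(sorted(rows))
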
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:37:59.975Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:38:00.021Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:38:00.059Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_06_30_02-744461350862588555 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:39:06.067Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:39:06.103Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T13:39:06.138Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_06_30_41-13186927704767619156 is in state JOB_STATE_DONE
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_05_54-2450342077225282206?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_21_22-14574625348123852158?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_30_42-9835206647821619649?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_40_00-17517677768170370925?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_48_53-6452743258755911246?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_58_51-2535924828078071302?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_08_37-16698870872244784732?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_18_05-8392055352709550616?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_27_26-18133000312399108909?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_05_53-11141463373926380184?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_18_47-4347379742777212445?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_30_39-6624584504710632704?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_41_00-5808216179245416090?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_50_29-3856095285031350202?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_58_54-6451190213706895594?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_07_28-12000109874897315221?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_16_07-6192349876317932669?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_05_53-14775220231516150777?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_25_50-17414183940786020936?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_34_08-6943760542344831880?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_42_54-5566158282512225281?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_52_39-15539632586286821371?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_02_35-16850604817271608148?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_11_03-9460790799931759015?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_19_29-6381853951476023338?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_30_02-744461350862588555?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_05_47-11930280167431748502?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_24_03-5208037721399336786?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_32_26-13069047253168013497?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_42_03-16743756682598992386?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_51_13-11925628115631218440?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_59_55-862137466395869547?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_09_51-8029056826372949076?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_19_20-9474813473424352702?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_07_53-9647392354567797962?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_17_40-6622238651618186578?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_28_12-3145447864802299579?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_37_15-4029173614082004549?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_47_13-1437173875681841992?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_56_55-5351156936193836223?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_05_42-9159383768565628461?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_15_24-10445853420568206260?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_05_50-12321091297674410646?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_15_13-13356256877397716415?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_25_12-6599193123039381246?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_35_44-14778648990315235051?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_46_45-1404677685507751519?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_55_15-17306212251246942789?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_03_50-11227858350228208325?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_13_29-10570000463283215492?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_21_57-3003194517542827072?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_30_41-13186927704767619156?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_05_56-13502718602085279653?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_14_57-16049433647164106346?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_23_37-2037906285791860484?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_31_46-15815541714496036985?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_40_53-17273178918511748280?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_49_33-7024795639624584217?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_57_52-3755709081984195230?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_07_01-2180444723423099721?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_15_29-4251842126052599840?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_23_52-17063743759952505001?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_05_50-476608124373976766?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_15_02-3473590347580122689?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_29_59-5471233698911696751?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_05_54_45-2315395506382767298?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_06_13_18-15908717863841782995?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5664.631s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 38m 49s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/sotdfn224mcj2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1474

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1474/display/redirect>

Changes:


------------------------------------------
[...truncated 23.17 MB...]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:31:56.158Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:31:56.195Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:31:56.228Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-c...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:06.812Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:06.900Z: JOB_MESSAGE_DEBUG: Value "assert_that/Group/_CoGBKImpl/GroupByKey/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:06.970Z: JOB_MESSAGE_BASIC: Executing operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:07.005Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:21.984Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:22.047Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:22.103Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:22.170Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:26.531Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:26.586Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:26.659Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:26.721Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:26.761Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:32.969Z: JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric descriptors, so new user metrics of the form custom.googleapis.com/* will not be created. However, all user metrics are also available in the metric dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics, you can delete old / unused metric descriptors. See https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:37.452Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on the rate of progress in the currently running stage(s).
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:45.565Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:45.611Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:32:45.645Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_00_24_15-8153117217245501823 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16271978411557.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found, so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/61b4ab2d-91bc-4b35-8885-b25fdf8647e1?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/61b4ab2d-91bc-4b35-8885-b25fdf8647e1?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59)), (b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16271978411557 in project apache-beam-testing
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:33:01.063Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:33:01.095Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:34:44.052Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:34:44.094Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:34:44.121Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_00_26_47-4946234505157866534 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:19.070Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:20.292Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:20.392Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:20.476Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:20.563Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:25.071Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:25.137Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:25.224Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:25.297Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:37:25.319Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:39:44.767Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:39:44.809Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T07:39:44.852Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-25_00_31_48-2641722960537826191 is in state JOB_STATE_DONE
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_06_12-1645059772039529740?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_21_35-1968704078003426974?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_30_40-7519826052611770105?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_40_29-12972848753959058307?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_49_26-16346040759602411985?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_57_56-12041891320363735421?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_07_17-1353878070283055789?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_16_40-2947225720238296314?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_26_47-4946234505157866534?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_05_56-9942833335822376086?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_18_55-5601443334194619743?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_30_36-1868496863783877114?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_39_11-14835139774875575488?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_48_03-10186259622665335500?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_57_06-2122647366517889677?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_06_02-14470448622275179093?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_14_53-3763332419362092843?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_24_15-8153117217245501823?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_05_58-11340924059370008033?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_25_03-14379342281803070318?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_33_15-6270703996341480850?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_41_39-11825967479571133107?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_49_36-2545633565788065263?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_58_57-1943689898095469108?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_07_30-4801948573262147437?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_15_43-15393392582830942443?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_05_49-14208059424154977901?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_24_05-3426930181432524162?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_32_22-5771419370269941708?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_40_36-4478152335935791645?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_49_25-12765180697927011988?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_57_47-6633123788240392685?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_06_31-1963108235501548344?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_14_44-10991536457074747591?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_07_58-14288646866475738557?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_17_25-13250664875219816610?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_27_29-12737825615588115939?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_36_36-11180682999600831987?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_45_48-12287611952671731810?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_54_44-2237639516819392526?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_03_37-7823948078051211034?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_13_01-17621938689485067224?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_21_45-10981235955320887408?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_05_59-227385404362728173?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_15_45-125385623066878627?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_25_59-718723253807464410?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_36_31-13797274527670093450?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_47_47-10803229974524593804?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_56_47-13099683219859193655?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_05_41-8234479215769950628?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_14_00-15080033054089767626?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_05_53-8025382840085658809?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_14_44-11966382922594079158?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_23_33-6037178340440523224?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_31_42-6853265517775254167?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_40_20-10015417246965392966?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_48_41-3844681132699386332?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_56_50-9238491505349879642?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_05_14-4297006434033602345?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_14_23-14009002793897134659?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_22_56-13356417659020375798?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_31_48-2641722960537826191?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_05_52-12098625006541142661?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_14_47-6019836205044708656?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_29_33-11982081843634143953?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_23_55_26-15308086435877834154?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-25_00_14_05-7871063811874093238?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5682.686s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings
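Both failing tasks are the Docker-based go-licenses image builds, and Gradle's hints above (--stacktrace/--info/--scan, plus --warning-mode all for the deprecation warnings) are the usual way to get more detail. A minimal, hypothetical way to re-run just those two tasks locally with the extra diagnostics, assuming a Beam source checkout and a working Docker daemon (task names and flags are taken from the Gradle output above, not from any additional source):

    # Re-run only the two failing license-image tasks with verbose diagnostics.
    # Hypothetical local reproduction sketch; adjust the working directory to
    # the root of the Beam checkout before running.
    import subprocess

    subprocess.run(
        [
            "./gradlew",
            ":release:go-licenses:java:docker",
            ":release:go-licenses:py:docker",
            "--stacktrace",
            "--info",
            "--warning-mode", "all",
        ],
        check=True,  # raise CalledProcessError if the tasks still fail
    )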

BUILD FAILED in 1h 39m 26s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/amjxjcfzebihc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1473

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1473/display/redirect>

Changes:


------------------------------------------
[...truncated 23.16 MB...]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:00.133Z: JOB_MESSAGE_DEBUG: Executing wait step start13
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:00.181Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:00.228Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:00.249Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-a...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:00.757Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:00.831Z: JOB_MESSAGE_DEBUG: Value "GroupByKey/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:00.942Z: JOB_MESSAGE_BASIC: Executing operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:08.828Z: JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric descriptors, so new user metrics of the form custom.googleapis.com/* will not be created. However, all user metrics are also available in the metric dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics, you can delete old / unused metric descriptors. See https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
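The message above is only a warning: the project has hit the cap on Dataflow-created custom metric descriptors, and per-job user counters remain available under dataflow.googleapis.com/job/user_counter. If old custom.googleapis.com/* descriptors ever need pruning as the message suggests, a hedged sketch with the google-cloud-monitoring client could look like the following (project name taken from this log; filter syntax assumed from the Cloud Monitoring API, not from the Beam test code):

    # Sketch: list (and optionally delete) old custom metric descriptors for
    # the project named in the log. Assumes google-cloud-monitoring is
    # installed and the caller may delete metric descriptors.
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    descriptors = client.list_metric_descriptors(
        request={
            "name": "projects/apache-beam-testing",
            "filter": 'metric.type = starts_with("custom.googleapis.com/")',
        }
    )
    for descriptor in descriptors:
        print("would delete", descriptor.type)
        # client.delete_metric_descriptor(name=descriptor.name)  # uncomment to delete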
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:31:42.462Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on the rate of progress in the currently running stage(s).
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:32:06.967Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:32:07.002Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:32:19.361Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:32:19.402Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:32:19.442Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_18_23_45-18099532356155480085 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16271762117390.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found so cannot authentication via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/1e86cf78-da8c-4e76-ace6-8d38bd9f264d?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/1e86cf78-da8c-4e76-ace6-8d38bd9f264d?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59)), (b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16271762117390 in project apache-beam-testing
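The two lines above are the test's post-write verification: the BigQuery matcher queries the freshly written table and compares the (bytes, date, time) rows against the expected values before the temporary dataset is deleted. A minimal sketch of that check with the google-cloud-bigquery client, using the table name from this log (a sketch, not the matcher's actual implementation):

    # Sketch of the row-verification query shown in the log above. Assumes
    # google-cloud-bigquery is installed, default credentials resolve to a
    # project with access, and the (temporary) dataset still exists.
    from google.cloud import bigquery

    client = bigquery.Client(project="apache-beam-testing")
    query = (
        "SELECT bytes, date, time "
        "FROM python_write_to_table_16271762117390.python_no_schema_table"
    )
    rows = [tuple(row.values()) for row in client.query(query).result()]
    print(sorted(rows))  # the matcher compares these against the expected tuples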
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:34:53.504Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:34:55.357Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:34:55.450Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:34:55.510Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:34:55.579Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:35:00.028Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:35:00.087Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:35:00.168Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:35:00.223Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:35:00.253Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:31.554Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:31.618Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:31.676Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:31.749Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:36.122Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:36.203Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:36.268Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:36.319Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:36:36.351Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:37:11.983Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:37:12.016Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:37:12.047Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_18_29_22-6566296019455520862 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:38:52.399Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:38:52.446Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-25T01:38:52.481Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_18_30_52-15392854720641582841 is in state JOB_STATE_DONE
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_05_01-1498890087079504460?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_20_33-1899168264825279053?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_29_37-14983505798657549017?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_38_37-12806671676485261394?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_47_50-929057579611018751?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_57_03-13124620490640481216?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_06_36-7900781873394427286?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_15_49-6966689114711207812?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_04_49-5282295075012325625?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_22_21-3131902876366565103?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_30_39-5991567804886998282?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_39_26-12101901704739076830?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_48_12-5189774660915224880?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_56_51-4588733780916953831?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_05_30-16520150455238197082?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_13_51-14485858518997166633?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_04_59-2220012860547018578?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_23_55-13226974277258996338?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_32_08-17803516105227836437?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_40_08-11569133435921736947?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_48_05-776817157502026684?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_56_40-13668412805218261101?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_05_47-14451582484581575202?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_14_34-15155804284220611116?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_23_45-18099532356155480085?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_07_23-11170060297325248732?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_16_44-6096047091733232043?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_27_13-8004415467806365084?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_36_07-15055166049371433780?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_45_10-17976193679216361394?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_54_30-14563010944193249019?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_03_28-14913004732635863077?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_12_33-17892408264287181170?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_21_13-12860110620281336157?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_04_57-16548176295673629773?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_17_56-18125735947704286207?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_29_41-4290940119064240465?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_39_41-3521281605068062094?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_49_00-15236253134484479735?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_58_02-5152149579765019972?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_07_06-14062634697075869985?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_15_24-17981165806735199625?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_04_50-11616468757159276179?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_13_35-16733162118348941810?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_22_11-11424571760209740689?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_30_25-10706697680727464684?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_38_51-18119582941431298859?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_47_27-14068226721210731173?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_03_43-12523462818394567339?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_12_16-8941965086716667646?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_20_54-7302115986186973823?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_30_52-15392854720641582841?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_05_00-16632930336246784183?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_13_55-9120267119357974295?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_28_18-4300904085955279533?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_54_34-15824985295255613893?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_13_04-1902055676381414626?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_04_53-12391017823035992586?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_14_08-5447362289023566120?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_24_13-6921621771367775761?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_34_45-11603793242070852087?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_46_06-7911118729046094463?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_17_54_57-14686858761737592029?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_03_37-12795872806773439668?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_12_20-17736673715890363397?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_20_54-17791335699233783897?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_18_29_22-6566296019455520862?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5687.879s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 38m 37s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/hjz5nih6r4goa

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1472

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1472/display/redirect>

Changes:


------------------------------------------
[...truncated 23.21 MB...]
DEBUG:google.auth._default:No App Engine library was found so cannot authentication via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "DELETE /bigquery/v2/projects/apache-beam-testing/datasets/python_pubsub_bq_16271539944481?deleteContents=true&prettyPrint=false HTTP/1.1" 200 None
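The DEBUG lines above show google-auth resolving application default credentials before the dataset cleanup call: explicit credentials and the Cloud SDK are checked first, App Engine is ruled out, and the GCE metadata server finally supplies a token scoped for BigQuery and cloud-platform. The same resolution in a few lines, with the scopes copied from the log (a sketch, not what the test harness literally runs):

    # Sketch of the application-default-credential resolution traced above.
    # On a GCE/Dataflow worker this falls through to the metadata server,
    # exactly as the DEBUG log shows.
    import google.auth
    import google.auth.transport.requests

    credentials, project_id = google.auth.default(
        scopes=[
            "https://www.googleapis.com/auth/bigquery",
            "https://www.googleapis.com/auth/cloud-platform",
        ]
    )
    credentials.refresh(google.auth.transport.requests.Request())  # fetch a token
    print(project_id, credentials.token is not None)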
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:01.293Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:01.373Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:01.446Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:01.555Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:05.889Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:05.957Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:06.032Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:06.078Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:06.105Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:23.806Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:23.847Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:32:23.878Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_12_23_54-1618780852598824095 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16271546204542.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found so cannot authentication via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/a9ce9d7e-746f-42a4-a615-84aeeb87a0bd?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/a9ce9d7e-746f-42a4-a615-84aeeb87a0bd?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59)), (b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16271546204542 in project apache-beam-testing
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:28.812Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:28.856Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:28.881Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_12_26_17-13641381108454195682 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:58.131Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:58.937Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:59.017Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:59.075Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:34:59.145Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:35:03.616Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:35:03.669Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:35:03.749Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:35:03.805Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:35:03.830Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:37:14.115Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:37:14.161Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T19:37:14.204Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_12_29_17-9030671023827081509 is in state JOB_STATE_DONE
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_04_44-11864366900576955084?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_20_10-5737923774368642059?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_29_24-5343187179582729950?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_38_43-1995163969252894994?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_47_40-4973401867436976499?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_56_14-6327902352967915037?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_05_31-8888265802413692270?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_14_23-1296771970456546829?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_23_54-1618780852598824095?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_04_42-7649969747443129520?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_23_37-12016269870532138996?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_32_00-8784719182060343708?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_40_03-18032109072861064095?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_48_26-4221167267983365000?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_58_07-729944923189958484?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_06_52-1631102659186391557?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_15_25-8266495146550333795?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_04_42-15490825140850518799?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_17_42-10363620970078423798?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_29_22-9947306195480649993?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_38_01-18247076726540212252?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_46_37-994679598997206886?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_56_15-17466456482724217596?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_04_38-13521803402703024232?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_13_07-7489791598573153965?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_04_50-6957307437004193772?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_22_29-2949126806975309342?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_30_54-4436996369747393938?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_39_32-11233938542666345404?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_48_11-18204696555982563605?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_56_22-17767788829669561281?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_04_55-15951250088481248731?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_13_18-15711554377439785441?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_21_36-9689019308764161585?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_06_33-5514559901571363911?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_16_19-2794001417685358286?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_26_55-12767518772502862775?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_36_13-3269761702754408403?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_45_45-17680293378308300564?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_55_03-12850812549468707198?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_03_56-8420032061531951876?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_13_12-14203581343246930371?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_04_39-14145777387294262894?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_13_34-14626196498288507521?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_22_50-6506183229855402593?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_31_08-18035474758513921092?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_39_35-1910347467619043101?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_48_24-3154016498602255478?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_57_18-9217401115573920889?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_06_48-2070442375154603503?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_16_14-1116722472018234284?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_26_17-13641381108454195682?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_04_42-14975804433591410536?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_13_52-8588904371699187611?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_28_15-1374364378756451963?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_55_01-15728577029828689043?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_13_25-2002407133759823549?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_04_40-6100015825018556530?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_14_04-12517580456166629162?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_24_18-5783977377828267446?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_34_48-5237678352365305880?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_45_54-3946194064980331707?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_11_54_24-300893777276129755?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_03_22-6179518933439030495?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_12_31-1955978536273975602?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_20_54-10688318713948093002?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_12_29_17-9030671023827081509?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5588.674s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
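Note that the test run itself passed (OK above); both failures point at the docker step of the go-licenses tasks. A hedged sketch of re-running one failing task locally with the diagnostics the Gradle output suggests, assuming a Beam source checkout with the Gradle wrapper and Docker available (an illustration, not part of the build itself):

import subprocess

# Re-run the failing task with the flags suggested by the Gradle output;
# check=True raises CalledProcessError if docker again exits non-zero.
subprocess.run(
    ['./gradlew', ':release:go-licenses:py:docker', '--stacktrace', '--info'],
    check=True)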

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 36m 58s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/ghczp4xtay5ss

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1471

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1471/display/redirect>

Changes:


------------------------------------------
[...truncated 23.21 MB...]
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "DELETE /bigquery/v2/projects/apache-beam-testing/datasets/python_pubsub_bq_16271323653485?deleteContents=true&prettyPrint=false HTTP/1.1" 200 None
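The DELETE call above is the test teardown removing its temporary dataset through the BigQuery REST API. A hedged, roughly equivalent sketch with the google-cloud-bigquery client follows; the dataset name is taken from the log, and whether the suite uses this client or raw HTTP is not shown here.

from google.cloud import bigquery

client = bigquery.Client(project='apache-beam-testing')
client.delete_dataset(
    'apache-beam-testing.python_pubsub_bq_16271323653485',
    delete_contents=True,   # mirrors deleteContents=true in the request above
    not_found_ok=True)      # tolerate a dataset that was already removed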
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:30:42.387Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on the rate of progress in the currently running stage(s).
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:30:47.953Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:30:47.999Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:30:48.035Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_06_22_50-10588612397965395319 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:31:06.195Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:31:06.225Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:32:31.093Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:32:31.139Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:32:31.184Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_06_24_03-2889933301160739707 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16271330291216.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/e8fceaf6-0361-440c-b5a6-e5ef153c4263?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/e8fceaf6-0361-440c-b5a6-e5ef153c4263?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16271330291216 in project apache-beam-testing
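The bigquery_matcher lines above verify the pipeline's output by running the query against the table it wrote and comparing the returned rows, order-insensitively, with the expected values. A minimal sketch of that kind of check with the google-cloud-bigquery client; this is an illustration, not the matcher's actual implementation.

from google.cloud import bigquery

client = bigquery.Client(project='apache-beam-testing')
query = ('SELECT bytes, date, time '
         'FROM python_write_to_table_16271330291216.python_no_schema_table')
# Collect the rows as a set of (bytes, date, time) tuples so ordering
# does not matter, matching the order-insensitive comparison the log shows.
actual = {tuple(row.values()) for row in client.query(query).result()}
# 'expected' would hold the four tuples shown in the "Result of query is"
# line above; the check passes when the two sets are equal.
# assert actual == expected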
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:08.886Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:08.976Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:09.044Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:09.150Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:12.539Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:12.606Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:12.683Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:12.741Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:33:12.790Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:26.412Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:26.591Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:26.651Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:26.704Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:26.761Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.610Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.657Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.701Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.186Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.252Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.323Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.364Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:35:32.386Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_06_27_21-628578266094808595 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:37:47.837Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:37:47.875Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T13:37:47.923Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_06_29_54-9377639927486161540 is in state JOB_STATE_DONE
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner", does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_05_23-8894582186308031499?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_21_00-16608676488395619724?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_30_05-16965210867766347515?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_40_01-18010171260600548010?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_49_01-18026448086163623616?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_57_43-10803289677934871924?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_06_16-3628752226785349200?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_14_30-15770629174095618778?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_05_22-16081376617948127145?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_25_33-10423333959916845953?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_33_51-8839565261436541162?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_42_15-6729591331006587667?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_51_30-11017436253775545975?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_00_22-15640589913010749097?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_09_05-18019904506275188535?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_17_13-3117524634605165923?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_27_21-628578266094808595?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_05_18-18409422537659356899?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_23_07-16290682939117173259?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_31_30-7871291660798664898?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_40_01-12683263981748387571?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_48_50-5421531249194291954?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_56_58-7244119834864742400?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_05_31-15316190594343914856?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_14_31-9665632751310649477?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_05_22-1957939412667525319?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_18_20-7647941605400609979?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_30_30-9558229800634380713?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_39_06-15113896372928858427?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_47_48-4267546884574051656?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_56_56-16217664919708630307?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_05_57-12745797151060448179?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_15_01-17418055349489866905?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_24_03-2889933301160739707?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_07_35-17206157172413060601?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_17_07-1748433516071464671?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_27_54-12414194782592370?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_37_21-6729148361132874412?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_46_49-5130164680349045902?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_56_11-11074637164519109084?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_04_46-4751936744234688512?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_14_07-18229713496164842567?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_22_50-10588612397965395319?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_05_21-1508036799268444502?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_14_25-13040432158651880729?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_28_22-13359319310409241811?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_54_28-2919865066320334159?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_12_57-15896226218453626613?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_05_25-3167092366949615888?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_14_19-6879324065681175124?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_23_20-14724424241704980071?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_31_49-10431546746872518459?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_40_15-18063069159273862570?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_48_54-8622198726927820565?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_57_51-16067687464726972060?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_07_18-11756076922807645808?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_16_59-9741440175527156635?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_05_22-9813623671572863774?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_14_26-6346624256876633417?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_24_45-2666141690472070351?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_35_01-15388899438570529635?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_46_16-12699689339728926101?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_05_55_00-1275943723526198985?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_03_34-14598051294208533189?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_12_47-11316999391927015431?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_21_26-7324137842591577166?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_06_29_54-9377639927486161540?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5613.625s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 37m 31s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/enb3sk7swm5uc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1470

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1470/display/redirect>

Changes:


------------------------------------------
[...truncated 23.13 MB...]
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "DELETE /bigquery/v2/projects/apache-beam-testing/datasets/python_pubsub_bq_16271108621601?deleteContents=true&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:33:11.853Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:33:11.893Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:33:11.920Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_00_24_41-65215017382219741 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16271114672756.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/862bf64d-dc81-48b1-8218-7b594522e4a7?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/862bf64d-dc81-48b1-8218-7b594522e4a7?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59)), (b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16271114672756 in project apache-beam-testing
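The DEBUG lines above trace the Application Default Credentials lookup: explicit credentials, then the Cloud SDK, then App Engine, and finally the GCE metadata server, whose token endpoint returns the scoped access token. A hedged sketch of the same lookup via google.auth; on a Dataflow worker this chain ends at the metadata server, as in the log.

import google.auth
import google.auth.transport.requests

# google.auth.default() walks the same chain the DEBUG lines show:
# explicit credentials -> Cloud SDK -> App Engine -> GCE metadata server.
credentials, project = google.auth.default(
    scopes=['https://www.googleapis.com/auth/bigquery',
            'https://www.googleapis.com/auth/cloud-platform'])
# Force a token fetch, the step the "token?scopes=..." request performs.
credentials.refresh(google.auth.transport.requests.Request())
print(project, credentials.valid)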
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:30.188Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:30.260Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:30.326Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:30.399Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:33.761Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:33.832Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:33.923Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:33.976Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:34:34.010Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:oauth2client.transport:Refreshing due to a 401 (attempt 1/2)
INFO:oauth2client.transport:Refreshing due to a 401 (attempt 1/2)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:36.448Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:36.658Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:36.721Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:36.793Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:36.874Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:41.849Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:41.916Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:41.984Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:42.039Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:42.070Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:47.568Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:47.617Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:36:47.656Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_00_28_58-1402721464947022660 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:39:01.039Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:39:01.083Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T07:39:01.121Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-24_00_31_01-3970034537571318303 is in state JOB_STATE_DONE
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_06_16-4914634955394747993?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_21_50-15189694353130958559?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_30_36-11608799478979784284?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_42_54-17353783498951377328?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_51_22-11964835902475415901?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_00_38-16219175845586258?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_09_20-11633051180722986432?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_17_44-9410306604492853723?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_06_16-12461511676476224065?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_24_44-4858437333327114095?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_32_52-13273982866142640580?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_42_27-3604038310698021802?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_51_05-7805509204757045424?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_00_01-13568765657418235575?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_09_28-3819004389640384303?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_18_44-4356796899097873591?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_28_58-1402721464947022660?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_06_14-5212817076919917160?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_19_28-15059311194423598950?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_31_18-11058138713128333141?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_39_53-11947446340679517457?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_48_29-16024974703642115806?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_57_41-9238315949933132659?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_06_32-3824783712073236368?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_15_30-3713567980358872916?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_24_41-65215017382219741?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_06_11-7173866815350424231?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_24_04-8872629216025671222?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_32_28-2340141649485352813?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_41_07-89950552668674545?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_50_36-2398906198729559837?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_59_04-817813223598924658?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_07_48-4351419928343653690?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_16_21-5428670047130306503?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_08_16-9323760839245516091?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_17_58-11181854871054359235?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_29_01-3834452652710635069?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_38_20-6746940050432262129?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_47_38-17257562106773703958?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_57_09-5686565081806466273?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_05_51-8495650383019660201?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_15_03-6768472608930990955?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_23_52-12131651777784625129?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_06_13-2062886531067829113?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_15_34-8072734238517019965?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_25_47-5128559778287951313?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_36_32-17080901606897414724?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_47_38-14504747458235849473?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_56_20-12615567394568670130?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_04_55-14405570096615516835?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_13_49-1079909605605933599?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_22_33-13845417806791113001?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_31_01-3970034537571318303?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_06_14-15685864285685516916?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_15_19-1125264053010463640?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_24_14-17150257234934263282?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_33_52-3069644429010527733?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_42_22-5226497140571451061?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_50_52-14508968770402132825?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_59_11-13343878690435001700?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_07_56-13342154388418968561?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_16_15-14728522081868697443?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_06_12-9938563536366917998?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_15_22-3648003507081794820?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_30_09-972699744190632503?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_23_55_59-6632840952220636576?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-24_00_14_35-11425926705723912892?project=apache-beam-testing

----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5628.888s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
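Both failures are the go-licenses Docker image builds, and Gradle's "Try" hint above amounts to re-running the task with extra diagnostics. A minimal local-reproduction sketch, assuming a Beam checkout and a working Docker daemon (the task path is copied from the failure message, the flags from Gradle's hint; the :py: variant is analogous):

    import subprocess

    # Hypothetical local re-run of the failing task with the diagnostics Gradle
    # suggests above. Run from the root of a Beam checkout; requires Docker.
    subprocess.run(
        ["./gradlew", ":release:go-licenses:java:docker", "--stacktrace", "--info"],
        check=True,
    )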

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 38m 39s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/5fwt2wind4cjg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1469

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1469/display/redirect?page=changes>

Changes:

[noreply] [BEAM-12399] Move CPython license to own file. (#15201)

[noreply] Revert "Default to Runner v2 for Python Streaming jobs. (#15140)"


------------------------------------------
[...truncated 23.11 MB...]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:29.384Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/ImpulseMonitorCopyJobs/Read+write/BigQueryBatchFileLoads/WaitForCopyJobs/WaitForCopyJobs
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:31.756Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/ImpulseMonitorCopyJobs/Read+write/BigQueryBatchFileLoads/WaitForCopyJobs/WaitForCopyJobs
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:31.807Z: JOB_MESSAGE_DEBUG: Value "write/BigQueryBatchFileLoads/WaitForCopyJobs.out" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:31.881Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/_UnpickledSideInput(WaitForCopyJobs.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:31.938Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/_UnpickledSideInput(WaitForCopyJobs.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:32.004Z: JOB_MESSAGE_DEBUG: Value "write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/_UnpickledSideInput(WaitForCopyJobs.out.0).output" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:32.062Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:32.267Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:32.330Z: JOB_MESSAGE_DEBUG: Value "write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:32.399Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/Impulse/Read+write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables+write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:36.895Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/Impulse/Read+write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables+write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:36.968Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:37.038Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:37.107Z: JOB_MESSAGE_BASIC: Executing operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow+write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+write/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-23_18_33_31-4528512495010924757 is in state JOB_STATE_RUNNING
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.427Z: JOB_MESSAGE_BASIC: Finished operation write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow+write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+write/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.489Z: JOB_MESSAGE_DEBUG: Executing success step success48
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.582Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.631Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.668Z: JOB_MESSAGE_BASIC: Stopping worker pool...
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found, so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "DELETE /bigquery/v2/projects/apache-beam-testing/datasets/python_pubsub_bq_16270893461110?deleteContents=true&prettyPrint=false HTTP/1.1" 200 None
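The DEBUG lines above are google-auth walking its Application Default Credentials chain: explicit credentials, then gcloud SDK credentials, then App Engine, then the GCE metadata server (which is what answers on these workers). A minimal sketch of the same resolution, with the scopes taken from the token request logged above:

    import google.auth
    from google.auth.transport.requests import AuthorizedSession

    # Resolve credentials via the same chain traced in the DEBUG output above;
    # on a GCE-based Jenkins worker this ends at the metadata server.
    credentials, project_id = google.auth.default(
        scopes=[
            "https://www.googleapis.com/auth/bigquery",
            "https://www.googleapis.com/auth/cloud-platform",
        ]
    )
    session = AuthorizedSession(credentials)  # authenticated HTTP session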
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:35.293Z: JOB_MESSAGE_DETAILED: Autoscaling is enabled for job 2021-07-23_18_33_31-4528512495010924757. The number of workers will be between 1 and 1000.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:35.400Z: JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job 2021-07-23_18_33_31-4528512495010924757.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:37.702Z: JOB_MESSAGE_BASIC: Worker configuration: e2-standard-2 in us-central1-c.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.467Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.501Z: JOB_MESSAGE_DEBUG: Combiner lifting skipped for step assert_that/Group/_CoGBKImpl/GroupByKey: GroupByKey not followed by a combiner.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.543Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.568Z: JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.620Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.668Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.694Z: JOB_MESSAGE_DETAILED: Unzipping flatten s9 for input s7.None
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.716Z: JOB_MESSAGE_DETAILED: Fusing unzipped copy of assert_that/Group/_CoGBKImpl/GroupByKey/Reify, through flatten assert_that/Group/_CoGBKImpl/Flatten, into producer assert_that/Group/_CoGBKImpl/Tag[0]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.737Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/_CoGBKImpl/GroupByKey/Reify into assert_that/Group/_CoGBKImpl/Tag[1]
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.783Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow into assert_that/Group/_CoGBKImpl/GroupByKey/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.805Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/_CoGBKImpl/MapTuple(collect_values) into assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.850Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/RestoreTags into assert_that/Group/_CoGBKImpl/MapTuple(collect_values)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.879Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Unkey into assert_that/Group/RestoreTags
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.916Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Match into assert_that/Unkey
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.947Z: JOB_MESSAGE_DETAILED: Unzipping flatten s9-u13 for input s10-reify-value0-c11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:38.981Z: JOB_MESSAGE_DETAILED: Fusing unzipped copy of assert_that/Group/_CoGBKImpl/GroupByKey/Write, through flatten assert_that/Group/_CoGBKImpl/Flatten/Unzipped-1, into producer assert_that/Group/_CoGBKImpl/GroupByKey/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.009Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/_CoGBKImpl/GroupByKey/Write into assert_that/Group/_CoGBKImpl/GroupByKey/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.044Z: JOB_MESSAGE_DETAILED: Fusing consumer Predict UserEvent/ParDo(_PredictUserEventFn) into Create data/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.077Z: JOB_MESSAGE_DETAILED: Fusing consumer ParDo(CallableWrapperDoFn) into Predict UserEvent/ParDo(_PredictUserEventFn)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.109Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/WindowInto(WindowIntoFn) into ParDo(CallableWrapperDoFn)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.141Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/ToVoidKey into assert_that/WindowInto(WindowIntoFn)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.174Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/_CoGBKImpl/Tag[1] into assert_that/ToVoidKey
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.208Z: JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/_CoGBKImpl/Tag[0] into assert_that/Create/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.246Z: JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.283Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.317Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.351Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.507Z: JOB_MESSAGE_DEBUG: Executing wait step start21
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.567Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.616Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:39.645Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-c...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:40.051Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:40.124Z: JOB_MESSAGE_DEBUG: Value "assert_that/Group/_CoGBKImpl/GroupByKey/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:40.183Z: JOB_MESSAGE_BASIC: Executing operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:33:40.221Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:34:08.432Z: JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric descriptors, so new user metrics of the form custom.googleapis.com/* will not be created. However, all user metrics are also available in the metric dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics, you can delete old / unused metric descriptors. See https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
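The message above is informational: the project has reached the 100-descriptor limit for Dataflow-created custom metrics, and the linked Monitoring APIs can be used to list and prune stale descriptors. A minimal sketch with the Cloud Monitoring client library (listing only; the delete call is left commented out because it is destructive, and this is an illustration rather than part of the test run):

    from google.cloud import monitoring_v3

    # List custom metric descriptors for the project named in the log above.
    client = monitoring_v3.MetricServiceClient()
    project_name = "projects/apache-beam-testing"
    for descriptor in client.list_metric_descriptors(name=project_name):
        if descriptor.type.startswith("custom.googleapis.com/"):
            print(descriptor.type)
            # To prune an unused descriptor (destructive, illustration only):
            # client.delete_metric_descriptor(name=descriptor.name)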
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:34:27.835Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on the rate of progress in the currently running stage(s).
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:34:52.698Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:34:52.725Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:01.162Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:01.211Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:01.246Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-23_18_27_28-11996399236829521346 is in state JOB_STATE_DONE
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Attempting to perform query SELECT bytes, date, time FROM python_write_to_table_16270900348627.python_no_schema_table to BQ
DEBUG:google.auth._default:Checking None for explicit credentials as part of auth process...
DEBUG:google.auth._default:Checking Cloud SDK credentials as part of auth process...
DEBUG:google.auth._default:Cloud SDK credentials not found on disk; not using them
DEBUG:google.auth._default:Checking for App Engine runtime as part of auth process...
DEBUG:google.auth._default:No App Engine library was found, so cannot authenticate via App Engine Identity Credentials.
DEBUG:google.auth.transport._http_client:Making request: GET http://169.254.169.254
DEBUG:google.auth.transport._http_client:Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
DEBUG:urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
DEBUG:urllib3.connectionpool:Starting new HTTP connection (1): metadata.google.internal:80
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
DEBUG:google.auth.transport.requests:Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform
DEBUG:urllib3.connectionpool:http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fbigquery%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform HTTP/1.1" 200 244
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): bigquery.googleapis.com:443
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "POST /bigquery/v2/projects/apache-beam-testing/jobs?prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/435af05d-cef3-43ca-9dcc-14ad98975d07?maxResults=0&timeoutMs=10000&location=US&prettyPrint=false HTTP/1.1" 200 None
DEBUG:urllib3.connectionpool:https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/queries/435af05d-cef3-43ca-9dcc-14ad98975d07?fields=jobReference%2CtotalRows%2CpageToken%2Crows&location=US&formatOptions.useInt64Timestamp=True&prettyPrint=false HTTP/1.1" 200 None
INFO:apache_beam.io.gcp.tests.bigquery_matcher:Result of query is: [(b'\xe4\xbd\xa0\xe5\xa5\xbd', datetime.date(3000, 12, 31), datetime.time(23, 59, 59)), (b'abc', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'\xab\xac\xad', datetime.date(2000, 1, 1), datetime.time(0, 0)), (b'xyw', datetime.date(2011, 1, 1), datetime.time(23, 59, 59, 999999))]
INFO:apache_beam.io.gcp.bigquery_write_it_test:Deleting dataset python_write_to_table_16270900348627 in project apache-beam-testing
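For reference, the verification performed by the matcher above boils down to a plain BigQuery query; a minimal sketch with the BigQuery client library (the dataset is the throwaway one created by this run and is deleted immediately afterwards, so the table name is illustrative only):

    from google.cloud import bigquery

    # Re-issue the verification query logged above. The dataset from this run
    # no longer exists; substitute a live dataset.table of your own.
    client = bigquery.Client(project="apache-beam-testing")
    sql = (
        "SELECT bytes, date, time "
        "FROM python_write_to_table_16270900348627.python_no_schema_table"
    )
    rows = list(client.query(sql).result())
    print(rows)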
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:52.476Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:52.543Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:52.600Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:52.657Z: JOB_MESSAGE_BASIC: Executing operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:57.985Z: JOB_MESSAGE_BASIC: Finished operation GroupByKey/Read+GroupByKey/GroupByWindow+m_out
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:58.038Z: JOB_MESSAGE_DEBUG: Executing success step success11
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:58.121Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:58.173Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:36:58.204Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:15.155Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Create/Read+assert_that/Group/_CoGBKImpl/Tag[0]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:16.625Z: JOB_MESSAGE_BASIC: Finished operation Create data/Read+Predict UserEvent/ParDo(_PredictUserEventFn)+ParDo(CallableWrapperDoFn)+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/_CoGBKImpl/Tag[1]+assert_that/Group/_CoGBKImpl/GroupByKey/Reify+assert_that/Group/_CoGBKImpl/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:16.714Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:16.764Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:16.834Z: JOB_MESSAGE_BASIC: Executing operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:21.025Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/_CoGBKImpl/GroupByKey/Read+assert_that/Group/_CoGBKImpl/GroupByKey/GroupByWindow+assert_that/Group/_CoGBKImpl/MapTuple(collect_values)+assert_that/Group/RestoreTags+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:21.088Z: JOB_MESSAGE_DEBUG: Executing success step success19
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:21.156Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:21.204Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:21.241Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:20.488Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:20.517Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:39:20.538Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-23_18_31_18-6706727028119194663 is in state JOB_STATE_DONE
INFO:oauth2client.transport:Refreshing due to a 401 (attempt 1/2)
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:41:37.075Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:41:37.131Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2021-07-24T01:41:37.160Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2021-07-23_18_33_31-4528512495010924757 is in state JOB_STATE_DONE
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_streaming_wordcount_debugging_it (apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT) ... SKIP: Skipped due to [BEAM-3377]: assert_that not working for streaming
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_run_example_with_setup_file (apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_flight_delays (apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_read_via_sql (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_via_table (apache_beam.io.gcp.experimental.spannerio_read_it_test.SpannerReadIntegrationTest) ... ok
test_read_queries (apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests) ... ok
test_spanner_error (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_spanner_update (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_write_batches (apache_beam.io.gcp.experimental.spannerio_write_it_test.SpannerWriteIntegrationTest) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_avro_file_load (apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... SKIP: BEAM-12352: enable once maxBytesRewrittenPerCall works again
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_dicom_search_instances (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_dicom_store_instance_from_gcs (apache_beam.io.gcp.dicomio_integration_test.DICOMIoIntegrationTest) ... ok
test_analyzing_syntax (apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_deidentification (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_inspection (apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_label_detection_with_video_context (apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT) ... ok
test_text_detection_with_language_hint (apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT) ... ok
test_job_python_from_python_it (apache_beam.transforms.external_it_test.ExternalTransformIT) ... ok
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
Test that schema update options are respected when appending to an existing ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_create_catalog_item (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok
test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 5665.562s

OK (SKIP=8)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 40m 47s
207 actionable tasks: 146 executed, 57 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/ouqutpv5f6ibc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1468

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1468/display/redirect?page=changes>

Changes:

[noreply] [BEAM-12652] revving JS filename to bust cache (#15209)


------------------------------------------
[...truncated 23.80 MB...]
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "ReadFromPubSub/Map(_from_proto_str).out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s1"
        },
        "serialized_fn": "ref_AppliedPTransform_ReadFromPubSub-Map-_from_proto_str-_4",
        "user_name": "ReadFromPubSub/Map(_from_proto_str)"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s3",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "add_attribute"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "add_attribute.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s2"
        },
        "serialized_fn": "ref_AppliedPTransform_add_attribute_5",
        "user_name": "add_attribute"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s4",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "message_to_proto_str"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "WriteToPubSub/ToProtobuf.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s3"
        },
        "serialized_fn": "ref_AppliedPTransform_WriteToPubSub-ToProtobuf_7",
        "user_name": "WriteToPubSub/ToProtobuf"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s5",
      "properties": {
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "kind:bytes"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "pubsub",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s4"
        },
        "pubsub_id_label": "id",
        "pubsub_serialized_attributes_fn": "",
        "pubsub_timestamp_label": "timestamp",
        "pubsub_topic": "projects/apache-beam-testing/topics/psit_topic_outpute04253bc-293e-483c-b18b-66f66ab15f5b",
        "user_name": "WriteToPubSub/Write/NativeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
--------------------- >> end captured logging << ---------------------
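
The Dataflow job graph captured above (ReadFromPubSub/Map(_from_proto_str) -> add_attribute -> WriteToPubSub/ToProtobuf -> WriteToPubSub/Write/NativeWrite) is the runner's rendering of the streaming PubSub round-trip these post-commit tests exercise. The following is only an illustrative sketch of such a pipeline, not the actual test source; the subscription, topic, and attribute values are placeholders, and runner/project options are omitted:

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions, StandardOptions

    def add_attribute(msg):
        # Elements are PubsubMessage objects because with_attributes=True below;
        # tag each one with an extra attribute (key/value here are placeholders).
        msg.attributes['processed'] = 'true'
        return msg

    options = PipelineOptions()
    options.view_as(StandardOptions).streaming = True

    with beam.Pipeline(options=options) as p:
        (p
         | 'ReadFromPubSub' >> beam.io.ReadFromPubSub(
             subscription='projects/PROJECT/subscriptions/INPUT_SUB',  # placeholder
             with_attributes=True,
             id_label='id',
             timestamp_attribute='timestamp')
         | 'add_attribute' >> beam.Map(add_attribute)
         | 'WriteToPubSub' >> beam.io.WriteToPubSub(
             topic='projects/PROJECT/topics/OUTPUT_TOPIC',  # placeholder
             with_attributes=True,
             id_label='id',
             timestamp_attribute='timestamp'))

The id_label and timestamp_attribute arguments map to the pubsub_id_label and pubsub_timestamp_label fields visible in the job JSON above.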

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 4939.009s

FAILED (SKIP=8, errors=7)

> Task :sdks:python:test-suites:dataflow:py38:postCommitIT FAILED

FAILURE: Build completed with 4 failures.

1: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/direct/common.gradle'> line: 144

* What went wrong:
Execution failed for task ':sdks:python:test-suites:direct:py38:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

4: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/dataflow/common.gradle'> line: 126

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 26m 24s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/mgwpupqxvfgfu

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1467

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1467/display/redirect?page=changes>

Changes:

[noreply] [BEAM-12640] Enable adding JHM to Java based projects and add an example


------------------------------------------
[...truncated 23.90 MB...]
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "ReadFromPubSub/Map(_from_proto_str).out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s1"
        },
        "serialized_fn": "ref_AppliedPTransform_ReadFromPubSub-Map-_from_proto_str-_4",
        "user_name": "ReadFromPubSub/Map(_from_proto_str)"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s3",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "add_attribute"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "add_attribute.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s2"
        },
        "serialized_fn": "ref_AppliedPTransform_add_attribute_5",
        "user_name": "add_attribute"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s4",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "message_to_proto_str"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "WriteToPubSub/ToProtobuf.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s3"
        },
        "serialized_fn": "ref_AppliedPTransform_WriteToPubSub-ToProtobuf_7",
        "user_name": "WriteToPubSub/ToProtobuf"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s5",
      "properties": {
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "kind:bytes"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "pubsub",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s4"
        },
        "pubsub_id_label": "id",
        "pubsub_serialized_attributes_fn": "",
        "pubsub_timestamp_label": "timestamp",
        "pubsub_topic": "projects/apache-beam-testing/topics/psit_topic_output05981213-c174-4a96-a651-f3884395e656",
        "user_name": "WriteToPubSub/Write/NativeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 4912.706s

FAILED (SKIP=8, errors=7)

> Task :sdks:python:test-suites:dataflow:py38:postCommitIT FAILED

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/dataflow/common.gradle'> line: 126

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 43m 45s
207 actionable tasks: 189 executed, 14 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/p6q7opo3lt7l4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PostCommit_Python38 #1466

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1466/display/redirect>

Changes:


------------------------------------------
[...truncated 23.88 MB...]
            "value": "add_attribute"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "add_attribute.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s2"
        },
        "serialized_fn": "ref_AppliedPTransform_add_attribute_5",
        "user_name": "add_attribute"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s4",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "message_to_proto_str"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "WriteToPubSub/ToProtobuf.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s3"
        },
        "serialized_fn": "ref_AppliedPTransform_WriteToPubSub-ToProtobuf_7",
        "user_name": "WriteToPubSub/ToProtobuf"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s5",
      "properties": {
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "kind:bytes"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "pubsub",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s4"
        },
        "pubsub_id_label": "id",
        "pubsub_serialized_attributes_fn": "",
        "pubsub_timestamp_label": "timestamp",
        "pubsub_topic": "projects/apache-beam-testing/topics/psit_topic_outpute8d85712-4e38-4358-9d80-6c5f1d08743d",
        "user_name": "WriteToPubSub/Write/NativeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
--------------------- >> end captured logging << ---------------------
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_07_00-12059180205304447982?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_21_18-17921596104695610828?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_47_41-15442561982780848693?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_56_34-5674344020864597741?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_04_52-1428521113991974055?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_06_43-8126581728004063360?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_26_44-5026100406154517488?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_36_19-17051143761250635746?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_45_12-4428107509300837679?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_54_53-107587705379051834?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_03_26-15706525054734186932?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_06_58-7674752384662495792?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_15_53-17856009239396375131?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_24_37-7366549151367809848?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_33_15-14675126990694543562?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_41_52-17308162978404245630?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_50_35-2526689159151346051?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_59_17-16856030827748439562?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_08_15-17429701935811429482?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_17_27-893669496539190560?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_06_35-259582009308181741?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_24_46-8885634709993495022?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_33_49-17574749085842540853?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_43_02-1420673622089643183?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_52_13-16423193213747900792?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_01_11-13885513933072768566?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_10_27-9899656616796883627?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_08_36-9897425072725920238?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_17_45-2900143059011756754?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_25_58-5026811536154486211?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_34_18-262619765205202951?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_45_59-7393674764427893967?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_55_00-7788702120888466203?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_03_56-11669041004816395729?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_06_36-7761371768781178995?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_16_01-13212193754639058452?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_24_09-11947176472529236087?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_32_48-4972018320959048264?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_41_27-15674398791796482882?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_49_50-8805554320463952765?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_58_29-16463328594134442335?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_06_48-8858834893493407449?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_17_11-12879193451213499464?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_06_41-5274103236947640985?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_15_50-16603812924984249873?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_27_30-18293364203726895019?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_36_03-6721928842090261294?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_44_47-9555359399612382?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_53_31-6115842382452032492?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_01_51-3241900940740966887?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_10_33-13415840512076417835?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_19_15-3269650442589573992?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_06_35-13261583277252168282?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_15_34-9016213545114196580?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_25_27-13178386735724241637?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_35_52-3434453494577898027?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_44_36-10470609318282312477?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-22_23_53_35-10336385719278906190?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_02_09-17272947025379611818?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2021-07-23_00_10_34-11364619008038553102?project=apache-beam-testing
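
The worker log links above open the Dataflow console; the same jobs can also be inspected from a terminal, assuming the Google Cloud SDK is installed and the account has read access to the apache-beam-testing project, for example:

    gcloud dataflow jobs describe 2021-07-22_23_07_00-12059180205304447982 \
        --region=us-central1 --project=apache-beam-testing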

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 74 tests in 4894.663s

FAILED (SKIP=8, errors=7)

> Task :sdks:python:test-suites:dataflow:py38:postCommitIT FAILED

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:java:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':release:go-licenses:py:docker'.
> Process 'command 'docker'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/dataflow/common.gradle'> line: 126

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 26m 49s
207 actionable tasks: 144 executed, 59 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/ltyptpd5rbm74

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org