Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/12/12 17:26:10 UTC

Build failed in Jenkins: beam_PostCommit_Python36 #1208

See <https://builds.apache.org/job/beam_PostCommit_Python36/1208/display/redirect?page=changes>

Changes:

[echauchot] [BEAM-8025] Under load we get a NoHostAvailable exception at cluster

[echauchot] [BEAM-8025] Remove temporary folder rule because it suppresses files on

[echauchot] [BEAM-8025] Disable auto compaction on Cassandra node to avoid race


------------------------------------------
[...truncated 620.97 KB...]
        "user_name": "monthly count/Combine"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s5",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "<lambda>"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_1"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_1"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_1"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "format.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s4"
        },
        "serialized_fn": "eNq1lFlz1EYQx7W7OIAAE5vTkIQ7aDkkwmGOcCVrDrMgjGyweKCmRtJ4R1lJo54ZYVyFq3KUXHyGFB82PVo7jhPgLQ86pnv6p55/d+vXjhPTksackYjR3NWSFmpJyFy5sZDM7tEso1HGFiUtSyZnxMPCBqv7G7RWoe2EHcuyyFIBnThJs8wl5m6TWDKqGVmqilinAgO2OVv8maAJ0Ssls2Es3IGInkjYAq7hqxq2B7DD6bf6Fl7t/r6e/cF6b/3RGrTmLdjp12B3wxaGvINdNewOc3z1uMiZ9wsrhmmhNp4XVUbfMm9ZyKHC8zHPHI/MCaV7Is9TTeZWNBfFlWlPydhTyVB5ZWPx/iGHtymHZ+RwyxXY02R8O6N5lNC7MP7sz7GeBXvDNlpRia9rmOhqmAxg35YzD5gmVGtpw/4GEFVppjFPOBBuxyW6jRcOrsGhAA5vCU3zUkhNcpFUGUo2FR7FgC8UDY7UcDSAb5rvEITEmhD4dg2+C+AYH/c/VauY4QKO820O/1v9Tn++N/neSlpTWIGkPWUlnY9trMIJxzf+cAzxuSg0h5PhHnzXQhY0ESQWVaHh1Bqc1nCm2/TIkLyF72s4G/7+/9aLvaN5mTFTLTGM8PKidAAVkytkPTumTBEdPt6/g2XrdrFW5wI4zw/zqfDSv3TdoLkbNPe/NLhQw8UAXI76egFcQn39Vfgh3GW0Nz1OeFpoBZe3jhk6GrubMCwZRZyyZ5+bGXhszDZcwRm7iqRrToNKi7LSDU/BtB/uRpOo9Kbtul+twY1I+RpuBnCrhh8DuF3DnVW46/DL3NDuIe2+w6d93mz+KRrlSOVAlSw2U/wzv1UhoBfATPMFnGCc/pEfHvhNo5ZSxEwpeMhn1rmPkPt4kzsbVdEbeLIK/Tfw9Iv/lsW0SMRyWgxseIYcxDx3mp5dbhyY0Nzn4kc77EeZiGg24qBmL5AShDtNK8p0MGASEfOfQ6xvsWfYEq0yvbC+hAWEvAwnTCvEcZVXGTX/MDN9DF5h108afJozpbE5sNfzKC2YhEV0NXqmiiQjJIQfqkjDa/cv5UvObA==",
        "user_name": "format"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s6",
      "properties": {
        "create_disposition": "CREATE_IF_NEEDED",
        "dataset": "BigQueryTornadoesIT",
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "RowAsDictJsonCoder$eNprYE5OLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLaqML8nPzynmCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwBK5xfp",
              "component_encodings": [],
              "pipeline_proto_coder_id": "ref_Coder_RowAsDictJsonCoder_7"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "bigquery",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s5"
        },
        "schema": "{\"fields\": [{\"name\": \"month\", \"type\": \"INTEGER\", \"mode\": \"NULLABLE\"}, {\"name\": \"tornado_count\", \"type\": \"INTEGER\", \"mode\": \"NULLABLE\"}]}",
        "table": "monthly_tornadoes_1576168667692",
        "user_name": "Write/WriteToBigQuery/NativeWrite",
        "write_disposition": "WRITE_TRUNCATE"
      }
    }
  ],
  "type": "JOB_TYPE_BATCH"
}
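The truncated JSON above is the Dataflow job graph for the BigQuery tornadoes integration test: step s5 ("format") maps each (month, count) pair to a row dict, and step s6 writes those rows to BigQueryTornadoesIT.monthly_tornadoes_1576168667692 with CREATE_IF_NEEDED / WRITE_TRUNCATE and a two-column INTEGER schema. As a rough sketch only (table, dataset, schema, and dispositions are taken from the job spec above; the pipeline scaffolding and sample input are assumptions, not the actual test code), such a write step looks roughly like this in the Beam Python SDK:

    # Illustrative sketch; values come from the job JSON above, scaffolding is assumed.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions()  # the test fills these in from the runner's arguments

    with beam.Pipeline(options=options) as p:
        (p
         | 'monthly count' >> beam.Create([(1, 10), (2, 7)])  # stand-in for the real read/count steps
         | 'format' >> beam.Map(lambda kv: {'month': kv[0], 'tornado_count': kv[1]})
         | 'Write' >> beam.io.WriteToBigQuery(
             table='monthly_tornadoes_1576168667692',
             dataset='BigQueryTornadoesIT',
             project='apache-beam-testing',
             schema='month:INTEGER,tornado_count:INTEGER',
             create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
             write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE))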
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
 createTime: '2019-12-12T16:38:03.293915Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2019-12-12_08_38_01-12820062103692776896'
 location: 'us-central1'
 name: 'beamapp-jenkins-1212163748-158924'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2019-12-12T16:38:03.293915Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: [2019-12-12_08_38_01-12820062103692776896]
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_01-12820062103692776896?project=apache-beam-testing
apache_beam.io.gcp.tests.utils: INFO: Clean up a BigQuery table with project: apache-beam-testing, dataset: BigQueryTornadoesIT, table: monthly_tornadoes_1576168667692.
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 181
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): www.googleapis.com:443
urllib3.connectionpool: DEBUG: https://www.googleapis.com:443 "DELETE /bigquery/v2/projects/apache-beam-testing/datasets/BigQueryTornadoesIT/tables/monthly_tornadoes_1576168667692 HTTP/1.1" 404 None
--------------------- >> end captured logging << ---------------------
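The captured logging above ends with the test's cleanup step: a DELETE of BigQueryTornadoesIT.monthly_tornadoes_1576168667692 that returns 404, i.e. the table it tried to remove did not exist at cleanup time. Purely as an illustration of that cleanup step (this is not the helper apache_beam.io.gcp.tests.utils actually uses, just a hypothetical equivalent using the google-cloud-bigquery client), a delete that tolerates an already-missing table looks like:

    # Hypothetical cleanup sketch; tolerates the 404 seen in the captured log above.
    from google.cloud import bigquery

    client = bigquery.Client(project='apache-beam-testing')
    client.delete_table(
        'apache-beam-testing.BigQueryTornadoesIT.monthly_tornadoes_1576168667692',
        not_found_ok=True)  # do not raise if the table was never created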
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_05-10314101527473050354?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1220: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_52_10-18409379154982617266?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_01_00-9193195482834051252?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_08_37-6326285987594106454?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_test.py>:652: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  streaming = self.test_pipeline.options.view_as(StandardOptions).streaming
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1220: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_07-2250759228353554767?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1220: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_50_23-6371600927579643244?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_58_03-1148489483974692725?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_06_02-6330502553605365745?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_13_58-10146256153778888813?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_02-2760941811592206808?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_59_34-8548869175537845260?project=apache-beam-testing
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_07_28-5277174562063569103?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_15_26-3166802950019809084?project=apache-beam-testing
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_06-9978395309284686575?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1217: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_57_43-17728704460770564698?project=apache-beam-testing
  self.table_reference.projectId = pcoll.pipeline.options.view_as(
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_03-3900208256033440509?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_46_38-3526088093897914229?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_54_17-9491514618902090467?project=apache-beam-testing
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_01_36-11094438615314019737?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_09_50-8984471454297459431?project=apache-beam-testing
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/big_query_query_to_table_pipeline.py>:73: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_01-12820062103692776896?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:738: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_40-11342892494540425347?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1220: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_47_46-8291210641394865664?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_55_42-10496174075279840959?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:797: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_04_26-7390284602985748749?project=apache-beam-testing
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_12_30-10144447328119908921?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:296: FutureWarning: MatchAll is experimental.
  | 'GetPath' >> beam.Map(lambda metadata: metadata.path))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:307: FutureWarning: MatchAll is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:307: FutureWarning: ReadMatches is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_03-7268848002221601100?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_46_23-4825873710356543153?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_54_24-16637215368942373057?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_03_08-17395061818125069478?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_10_53-13752873172500287978?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_18_19-12302103494057515987?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_38_03-3114461064923458710?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_47_10-12098246525024657177?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1220: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_08_56_02-13712684946897800063?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:797: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_05_27-13853136771981703511?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-12-12_09_14_24-13901130639713679249?project=apache-beam-testing
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1220: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:797: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
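Two deprecation warnings repeat throughout the output above. The BigQuerySink warning points at the WriteToBigQuery form already sketched after the job JSON earlier in this log. The options warning ("References to <pipeline>.options will not be supported") is triggered by reading settings through p.options; one way to avoid it is to keep the PipelineOptions object that was passed to the Pipeline and query it directly. A minimal sketch, with placeholder values only:

    # Illustrative only: read options from the PipelineOptions object rather
    # than from <pipeline>.options, which triggers the warning above.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import (
        DebugOptions, GoogleCloudOptions, PipelineOptions)

    options = PipelineOptions(['--temp_location', 'gs://some-bucket/tmp'])  # placeholder flag

    experiments = options.view_as(DebugOptions).experiments or []
    temp_location = options.view_as(GoogleCloudOptions).temp_location

    pipeline = beam.Pipeline(options=options)  # transforms and run() elided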

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 45 tests in 2908.226s

FAILED (SKIP=6, errors=1, failures=1)

> Task :sdks:python:test-suites:dataflow:py36:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/dataflow/py36/build.gradle'> line: 56

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 49m 48s
83 actionable tasks: 63 executed, 20 from cache

Publishing build scan...
https://scans.gradle.com/s/dtra2juwndcns

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PostCommit_Python36 #1209

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python36/1209/display/redirect?page=changes>

