You are viewing a plain text version of this content. The canonical link for it is here.
Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2020/06/02 09:29:34 UTC

Build failed in Jenkins: beam_PerformanceTests_WordCountIT_Py37 #1503

See <https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/1503/display/redirect>

Changes:


------------------------------------------
[...truncated 260.12 KB...]
            "output_name": "out",
            "step_name": "SideInput-s19"
          },
          "python_side_input2-write/Write/WriteImpl/FinalizeWrite": {
            "@type": "OutputReference",
            "output_name": "out",
            "step_name": "SideInput-s20"
          }
        },
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_5"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_5"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_5"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "write/Write/WriteImpl/FinalizeWrite.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s7"
        },
        "serialized_fn": "eNrNWHd8HEcVvjvJbe04LsRgUriYGFYhd2fZ2LFNSEjOlmMOy8pKiZdiNnu7czdrbbk3MytZQZvEcVRwEggQQjNg00LvEHrvJaFjOoRO6DVU583sneQTsvB/5idpd+/tvG/nve+99+3pxg7dsRu2Q4lVJXZQFMwOeS1iAS86ESNa2fZ9u+qTPcxuNAjbFvWEGmS6DkA2gZxudmQyGasWQkcbiBfhb9XmRLNqXmj73nXEGmaeIBp0mkuUS4ywrM4bxIF5veYCtDVY5BDOYT7tNBfLNWKkQSzqhYLDAsf1fL9oyaNmOYzYglihHRBXxA0fYRcql527B9DnSuWySFm8sBELhcRBU4+OYjFtWmzOlybm1b0QlkzAGWYeP7flA9epPRRdggmxRcQ4LJ2AMw1Y1tu7F5ZPwAoDVsbVvfAQfWYSBNkvvEizBvDc74WDGpyFmVuVwEN1cxk+qOb5xGrYgloNRmrefnhYG0LUQO+QF4dsP8Z1LBryXMK0fmELz7lGGvtaNliNwA9P4Gzd1BA49ZB7h3PaUudHtqvsGpyryOOCwXlj8AgD8ua8liecb+7G6zrfWioJEjQKHMO266SAhVEgoVsQUXoiXPBSY6TgiYLjR7FbSrNb6t64pXvd5i3dGzev37KhxAiPfaRkTVydDluyZ/G4JsN+JJXbvyCBtTo9h+bp+SauysCjcL2MJowDi1ObuRweXcmobToRRg3tCVc23jxpA9EVI7i/cqTS04X4FybwmKq5XAYtwdIdyFK3BVxkXoD2wlq9eSsOutZtdAtRDU3Tj5c2KJir2iHqflRt4RTVzcKFs3qWVPROFCDbnCO1KUPrzE5ZdLGIoNtchJeBFzTJW6/il2VUavg21ugGc7VsDdmLIRLJbM/3wroVkmE8Y0E/dlL1kkWJLRO0sTeuthdlKAjDjizGwvO1y1k9Dkgo+nzbITTyVao2YaoupvK4uZJV5y0kga174XG6uUn26YiguHWOdWep9lpXUL1d2jN93Bk0/FJPs/WVBS5p20dD1ZlmXR02PGfQJ24/wu2UaBo8PoFLdZUT1xY2XDab49TybbhEgyfgLi9P4ArdXCpT6chBIhtLBgtl82w0Su+t05veinticrBtHeqGbaoqhr3QjYatAJMrc4pTbfvJRqPEUTBcU/TbvvVf3hr0KC6GPDIswXbMNsNqcejIJtfgSp2eay7E9bJi5RiDnWPwRAMqeiVbyeBfR2VlWZvMjGYmswdz/Rl4Uu8Y7OpSLq1YAE27TYaWEo0CUtpHwkEv5K1zgfv2ECkNR2yQY1ykJMOy+ghTxRs6ZED2s7UnYm45ikOxc8DqG9lwcYkzp8TdQdnpkvrSCUkppXwUGyPQp7ZyiW8HVde+FK7adShbzoBhniU7nkWBxRBSVvbUbvvVQFb5aY46GBiHq7sEXGPAnrZ01YmwkE4sT1M9php7vsCQ4Mkqx3hb3oWnjMNTDXham6sXNCImrCByY6kUe80VM0Z8GgI8fQwsA65V8Bb6OsKywB6HqgEOvap3NvYcgh/Apcgd0pRDmjoqCyrl8rUiI7JHcqMZN3s0dzCb5EZzfM1o9kiHmxOdBzNHkT4xbxTvuB2DObZpNOd2rs6gbb47L7WLBamtedUhr2odq/B4U9bFlUD03kpOhe6Smo3TFWqyTMzr0dJXjnyfqLLKR7U8xxbMr3Xzw56g+QA1PS+ojXdCkic+ke2fTxuGuHmb5210COs+EegtqSnmezzGRV4MR631PE9CRxYIYdIHEdes5WsuUsci1AXQtH19jwvw1EiTnIso8jnsU5Lrcd9zCAwqDUL2wFeduz1oiJGp3oZA3fZJCKEahErztjMWMYjoeQIaZk5hA6hUtIqIqafLEQh8AgTtU0PZmr0I410PZMvLMrll2c7s0uyS7MJsLpvLwVAXluGwAfupQ+Pe3gRGegVcZ8AzxmDUgIS6CVyvn2Rs3EB7ThCpG+cQqT
3Kl7gqsKZUHcBZdlMCB3VzpURX712ulSp6injzHIg9Nhd9zAs84Q1Nqd8YQo4nMGGeqfKAr3RV2xlsok3OgdanhnMT5ZmIciiBW6q8qoBkHrmwg0YT6Na51Li1tol1G2I9K4FnV9UIaKYwhbl9DpgdatamWWsiPQeRnpvA81RZpWmCO042t9MHtcFo8HxEuDOBF6iqsVBX4YVt/lIqeXEq2ulYNHgRur44gZeohweewyIOhw8tPHZYv+/+48dv4FX5E1fjMXipAS8bh5cncATr6KgBr4irdDvtoTqVfL8ygVfp9GYqiXp1Aq+hk1Rm+64EXosA9FYq8/W6BF5fpbdTGfEbEngjvYPeqdAn4E2zK3P3KSnzmykq7lt0ehmVMvrWBN6m0/+tmW9X29/RnH2pRGUr/eXOUTmf3oFK9M4uisd3nS49ene7Ht29a02OGrKr32PAe7Gr75Zd/T5k4/0GfKCNjQ8m8KEWGx9O4CPIxl1TLHw0gY+1WPh4Ap84gYVPzs7C+lNi4VOShU+3WPhMAp89FRY+NwsLM18UPo8sfEFx8cXTxcU97Vzci+8G1KD9FOX+S8jIlw34CjJyby89HVr6Vaml9P9GP78m4Os69eg+Okh9GtCQKq37BgXKKOrZN+k9c+nZsdn17Fuy8r9twHcwz8dk5X8XK/97Bnx/DH5gwA+lnv3oZHp2X5ue/Vj1yE8S+GmrR36WwM+bE+sXCfxyemL9KoH7W73y6wR+M9UrY/BbA343Dr9P4A+4jz8a8Ke2DvxzAn9pof81gb+1deADCfy9hfqPBP55Qgf+i8z5z4x04mNUGvwbnf+TwHFdVWYacy1kmSyGeeriwTWWRY+uAyyHJ/WmI5hXr2PbhKxjDqzmKm1b+vY20PzIOlO0eRJtefpVJg5i35bFKF9hCZuPtypZ9U0SdVy+PFh4m+CXP84WzHziTOnaFjMFpbGF6YMW4SlhGh7p4UoGhX3FDGEPqvi9krHF6VPV/2U8bjVfOtkSNE/GVcHOwIvig1jSYc0=",
        "user_name": "write/Write/WriteImpl/FinalizeWrite/FinalizeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_BATCH"
}
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
 createTime: '2020-06-02T09:12:43.259569Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2020-06-02_02_12_41-9636899939901279817'
 location: 'us-central1'
 name: 'performance-tests-wordcount-python37-batch-1gb0602091047'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2020-06-02T09:12:43.259569Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: [2020-06-02_02_12_41-9636899939901279817]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job: 2020-06-02_02_12_41-9636899939901279817
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobs/us-central1/2020-06-02_02_12_41-9636899939901279817?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-06-02_02_12_41-9636899939901279817 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:45.929Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-a.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.321Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.363Z: JOB_MESSAGE_DEBUG: Combiner lifting skipped for step write/Write/WriteImpl/GroupByKey: GroupByKey not followed by a combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.407Z: JOB_MESSAGE_DEBUG: Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.449Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.486Z: JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.602Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.668Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.702Z: JOB_MESSAGE_DETAILED: Fusing consumer split into read/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.741Z: JOB_MESSAGE_DETAILED: Fusing consumer pair_with_one into split
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.790Z: JOB_MESSAGE_DETAILED: Fusing consumer group/Reify into pair_with_one
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.817Z: JOB_MESSAGE_DETAILED: Fusing consumer group/Write into group/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.845Z: JOB_MESSAGE_DETAILED: Fusing consumer group/GroupByWindow into group/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.881Z: JOB_MESSAGE_DETAILED: Fusing consumer count into group/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.924Z: JOB_MESSAGE_DETAILED: Fusing consumer format into count
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:47.963Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/WindowInto(WindowIntoFn) into format
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.003Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/WriteBundles/WriteBundles into write/Write/WriteImpl/WindowInto(WindowIntoFn)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.040Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/Pair into write/Write/WriteImpl/WriteBundles/WriteBundles
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.080Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/GroupByKey/Reify into write/Write/WriteImpl/Pair
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.117Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/GroupByKey/Write into write/Write/WriteImpl/GroupByKey/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.153Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/GroupByKey/GroupByWindow into write/Write/WriteImpl/GroupByKey/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.190Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/Extract into write/Write/WriteImpl/GroupByKey/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.232Z: JOB_MESSAGE_DETAILED: Fusing consumer write/Write/WriteImpl/InitializeWrite into write/Write/WriteImpl/DoOnce/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.283Z: JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.325Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.370Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.403Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.696Z: JOB_MESSAGE_DEBUG: Executing wait step start26
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.777Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.808Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/GroupByKey/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.820Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.841Z: JOB_MESSAGE_BASIC: Executing operation group/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.852Z: JOB_MESSAGE_BASIC: Starting 10 workers in us-central1-a...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.910Z: JOB_MESSAGE_BASIC: Finished operation group/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.910Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/GroupByKey/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:48.990Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/GroupByKey/Session" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:49.026Z: JOB_MESSAGE_DEBUG: Value "group/Session" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:12:49.100Z: JOB_MESSAGE_BASIC: Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:13:13.766Z: JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric descriptors and Stackdriver will not create new Dataflow custom metrics for this job. Each unique user-defined metric name (independent of the DoFn in which it is defined) produces a new metric descriptor. To delete old / unused metric descriptors see https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:13:18.181Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 8 based on the rate of progress in the currently running stage(s).
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:13:18.218Z: JOB_MESSAGE_DETAILED: Resized worker pool to 8, though goal was 10.  This could be a quota issue.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:13:23.683Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 10 based on the rate of progress in the currently running stage(s).
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:14:55.185Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:14:55.216Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.099Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.172Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/DoOnce/Read.out" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.203Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/InitializeWrite.out" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.264Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/WriteBundles/_UnpickledSideInput(InitializeWrite.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.293Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(InitializeWrite.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.313Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/WriteBundles/_UnpickledSideInput(InitializeWrite.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.332Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/PreFinalize/_UnpickledSideInput(InitializeWrite.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.352Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(InitializeWrite.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.391Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/PreFinalize/_UnpickledSideInput(InitializeWrite.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.396Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/WriteBundles/_UnpickledSideInput(InitializeWrite.out.0).output" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.436Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(InitializeWrite.out.0).output" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:18:25.476Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/PreFinalize/_UnpickledSideInput(InitializeWrite.out.0).output" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:20:01.747Z: JOB_MESSAGE_BASIC: Finished operation read/Read+split+pair_with_one+group/Reify+group/Write
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:20:01.846Z: JOB_MESSAGE_BASIC: Executing operation group/Close
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:20:01.895Z: JOB_MESSAGE_BASIC: Finished operation group/Close
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:20:01.997Z: JOB_MESSAGE_BASIC: Executing operation group/Read+group/GroupByWindow+count+format+write/Write/WriteImpl/WindowInto(WindowIntoFn)+write/Write/WriteImpl/WriteBundles/WriteBundles+write/Write/WriteImpl/Pair+write/Write/WriteImpl/GroupByKey/Reify+write/Write/WriteImpl/GroupByKey/Write
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:42.573Z: JOB_MESSAGE_BASIC: Finished operation group/Read+group/GroupByWindow+count+format+write/Write/WriteImpl/WindowInto(WindowIntoFn)+write/Write/WriteImpl/WriteBundles/WriteBundles+write/Write/WriteImpl/Pair+write/Write/WriteImpl/GroupByKey/Reify+write/Write/WriteImpl/GroupByKey/Write
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:42.715Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/GroupByKey/Close
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:42.780Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/GroupByKey/Close
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:42.853Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/GroupByKey/Read+write/Write/WriteImpl/GroupByKey/GroupByWindow+write/Write/WriteImpl/Extract
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.413Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/GroupByKey/Read+write/Write/WriteImpl/GroupByKey/GroupByWindow+write/Write/WriteImpl/Extract
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.491Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/Extract.out" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.557Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(Extract.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.583Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/PreFinalize/_UnpickledSideInput(Extract.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.610Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(Extract.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.639Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/PreFinalize/_UnpickledSideInput(Extract.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.677Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(Extract.out.0).output" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.713Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/PreFinalize/_UnpickledSideInput(Extract.out.0).output" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:45.785Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/PreFinalize/PreFinalize
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:48.211Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/PreFinalize/PreFinalize
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:48.288Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/PreFinalize.out" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:48.354Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(PreFinalize.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:48.404Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(PreFinalize.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:48.476Z: JOB_MESSAGE_DEBUG: Value "write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(PreFinalize.out.0).output" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:48.543Z: JOB_MESSAGE_BASIC: Executing operation write/Write/WriteImpl/FinalizeWrite/FinalizeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:51.061Z: JOB_MESSAGE_BASIC: Finished operation write/Write/WriteImpl/FinalizeWrite/FinalizeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:51.120Z: JOB_MESSAGE_DEBUG: Executing success step success24
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:51.306Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:51.380Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:22:51.409Z: JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:24:35.075Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 10 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:24:35.183Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-06-02T09:24:35.223Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-06-02_02_12_41-9636899939901279817 is in state JOB_STATE_DONE
apache_beam.io.filesystem: DEBUG: Listing files in 'gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results'
apache_beam.io.filesystem: DEBUG: translate_pattern: 'gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results*-of-*' -> 'gs://temp\\-storage\\-for\\-end\\-to\\-end\\-tests/py\\-it\\-cloud/output/1591089158293/results[^/\\\\]*\\-of\\-[^/\\\\]*'
apache_beam.io.gcp.gcsio: INFO: Starting the size estimation of the input
apache_beam.io.gcp.gcsio: INFO: Finished listing 30 files in 0.12344193458557129 seconds.
apache_beam.testing.pipeline_verifiers: INFO: Find 30 files in gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results*-of-*: 
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00000-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00001-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00002-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00003-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00004-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00005-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00006-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00007-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00008-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00009-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00010-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00011-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00012-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00013-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00014-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00015-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00016-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00017-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00018-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00019-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00020-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00021-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00022-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00023-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00024-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00025-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00026-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00027-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00028-of-00030
gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results-00029-of-00030
apache_beam.testing.pipeline_verifiers: INFO: Read from given path gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results*-of-*, 26186927 lines, checksum: ea0ca2e5ee4ea5f218790f28d0b9fe7d09d8d710.
root: INFO: average word length: 19
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
urllib3.util.retry: DEBUG: Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None)
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 192
urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): bigquery.googleapis.com:443
urllib3.connectionpool: DEBUG: https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/datasets/beam_performance HTTP/1.1" 200 None
urllib3.connectionpool: DEBUG: https://bigquery.googleapis.com:443 "GET /bigquery/v2/projects/apache-beam-testing/datasets/beam_performance/tables/wordcount_py37_pkb_results HTTP/1.1" 200 None
apache_beam.io.filesystem: DEBUG: Listing files in 'gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results'
apache_beam.io.filesystem: DEBUG: translate_pattern: 'gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1591089158293/results*' -> 'gs://temp\\-storage\\-for\\-end\\-to\\-end\\-tests/py\\-it\\-cloud/output/1591089158293/results[^/\\\\]*'
apache_beam.io.gcp.gcsio: INFO: Starting the size estimation of the input
apache_beam.io.gcp.gcsio: INFO: Finished listing 30 files in 0.1314389705657959 seconds.
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: nosetests-runPerformanceTest-df-py37.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 1 test in 1010.854s

FAILED (errors=1)

> Task :sdks:python:test-suites:dataflow:py37:runPerformanceTest FAILED
:sdks:python:test-suites:dataflow:py37:runPerformanceTest (Thread[Execution worker for ':',5,main]) completed. Took 16 mins 55.369 secs.

FAILURE: Build failed with an exception.

* Where:
Script '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 192

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py37:runPerformanceTest'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 18m 5s
5 actionable tasks: 5 executed

Publishing build scan...
https://gradle.com/s/x5wyk24zl4mya

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PerformanceTests_WordCountIT_Py37 #1505

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/1505/display/redirect>


---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PerformanceTests_WordCountIT_Py37 #1504

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/1504/display/redirect?page=changes>

Changes:

[github] [BEAM-9723] Add DLP integration transforms (#11566)


------------------------------------------
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-9 (beam) in workspace <https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/>
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/*
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision b6ca2aba5a0141eed5bed29a9948e2c65874254f (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f b6ca2aba5a0141eed5bed29a9948e2c65874254f
Commit message: "[BEAM-9723] Add DLP integration transforms (#11566)"
 > git rev-list --no-walk 01c11e7211937bde3c238fe3639f9dfe7774d093 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1

[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ <https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/gradlew> --info "-Ptest-pipeline-options=--project=apache-beam-testing --region=us-central1 --staging_location=gs://temp-storage-for-end-to-end-tests/staging-it --temp_location=gs://temp-storage-for-end-to-end-tests/temp-it --job_name=performance-tests-wordcount-python37-batch-1gb0602123322 --runner=TestDataflowRunner --publish_to_big_query=true --metrics_dataset=beam_performance --metrics_table=wordcount_py37_pkb_results --influx_measurement=wordcount_py37_results --influx_db_name=beam_test_metrics --influx_hostname=http://10.128.0.96:8086 --input=gs://apache-beam-samples/input_small_files/ascii_sort_1MB_input.0000* --output=gs://temp-storage-for-end-to-end-tests/py-it-cloud/output --expect_checksum=ea0ca2e5ee4ea5f218790f28d0b9fe7d09d8d710 --num_workers=10 --autoscaling_algorithm=NONE" -Ptest=apache_beam.examples.wordcount_it_test:WordCountIT.test_wordcount_it --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g -Dorg.gradle.jvmargs=-Xmx4g -Pdocker-pull-licenses :sdks:python:test-suites:dataflow:py37:runPerformanceTest
Initialized native services in: /home/jenkins/.gradle/native
Removing 0 daemon stop events from registry
Starting a Gradle Daemon (subsequent builds will be faster)
Starting process 'Gradle build daemon'. Working directory: /home/jenkins/.gradle/daemon/5.2.1 Command: /usr/lib/jvm/java-8-openjdk-amd64/bin/java -Xmx4g -Dfile.encoding=UTF-8 -Duser.country=US -Duser.language=en -Duser.variant -cp /home/jenkins/.gradle/wrapper/dists/gradle-5.2.1-all/bviwmvmbexq6idcscbicws5me/gradle-5.2.1/lib/gradle-launcher-5.2.1.jar org.gradle.launcher.daemon.bootstrap.GradleDaemon 5.2.1
Successfully started process 'Gradle build daemon'
An attempt to start the daemon took 1.033 secs.
The client will now receive all logging from the daemon (pid: 2936). The daemon log file: /home/jenkins/.gradle/daemon/5.2.1/daemon-2936.out.log
Starting build in new daemon [memory: 3.8 GB]
Closing daemon's stdin at end of input.
The daemon will no longer process any standard input.
Using 12 worker leases.
Starting Build
Using local directory build cache for build ':buildSrc' (location = /home/jenkins/.gradle/caches/build-cache-1, removeUnusedEntriesAfter = 7 days).

> Configure project :buildSrc
Evaluating project ':buildSrc' using build file '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/build.gradle'.>
Selected primary task 'build' from project :
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/test/groovy',> not found
:buildSrc:compileJava (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:compileJava NO-SOURCE
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/main/java',> not found
Skipping task ':buildSrc:compileJava' as it has no source files and no previous output files.
:buildSrc:compileJava (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.065 secs.
:buildSrc:compileGroovy (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:compileGroovy FROM-CACHE
Build cache key for task ':buildSrc:compileGroovy' is 4a811490342dcc3d3beeca48174027a4
Task ':buildSrc:compileGroovy' is not up-to-date because:
  No history is available.
Origin for org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$TaskExecution@2c3cf8a6: {executionTime=4851, hostName=apache-beam-jenkins-9, operatingSystem=Linux, buildInvocationId=5ds3kmrfmzeztozlvp56vufpty, creationTime=1591041576961, identity=:compileGroovy, type=org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.TaskExecution, userName=jenkins, gradleVersion=5.2.1, rootPath=/home/jenkins/jenkins-slave/workspace/beam_PreCommit_Python2_PVR_Flink_Commit/src/buildSrc}
Unpacked trees for task ':buildSrc:compileGroovy' from cache.
:buildSrc:compileGroovy (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.205 secs.
:buildSrc:pluginDescriptors (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:pluginDescriptors
Build cache key for task ':buildSrc:pluginDescriptors' is db14b8b6679df7db8b87deb94d04a710
Caching disabled for task ':buildSrc:pluginDescriptors': Caching has not been enabled for the task
Task ':buildSrc:pluginDescriptors' is not up-to-date because:
  No history is available.
:buildSrc:pluginDescriptors (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.036 secs.
:buildSrc:processResources (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:processResources
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/main/resources',> not found
Build cache key for task ':buildSrc:processResources' is d9c17925b763b8381ce3e616f3244816
Caching disabled for task ':buildSrc:processResources': Caching has not been enabled for the task
Task ':buildSrc:processResources' is not up-to-date because:
  No history is available.
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/main/resources',> not found
:buildSrc:processResources (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.042 secs.
:buildSrc:classes (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:classes
Skipping task ':buildSrc:classes' as it has no actions.
:buildSrc:classes (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.001 secs.
:buildSrc:jar (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:jar
Build cache key for task ':buildSrc:jar' is 85ee2c659508e5c4a199eac18d79e82a
Caching disabled for task ':buildSrc:jar': Caching has not been enabled for the task
Task ':buildSrc:jar' is not up-to-date because:
  No history is available.
:buildSrc:jar (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.378 secs.
:buildSrc:assemble (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:assemble
Skipping task ':buildSrc:assemble' as it has no actions.
:buildSrc:assemble (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.0 secs.
:buildSrc:spotlessGroovy (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:spotlessGroovy
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/test/groovy',> not found
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/test/groovy',> not found
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/test/groovy',> not found
Caching disabled for task ':buildSrc:spotlessGroovy': Caching has not been enabled for the task
Task ':buildSrc:spotlessGroovy' is not up-to-date because:
  No history is available.
All input files are considered out-of-date for incremental task ':buildSrc:spotlessGroovy'.
:buildSrc:spotlessGroovy (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 2.509 secs.
:buildSrc:spotlessGroovyCheck (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:spotlessGroovyCheck
Skipping task ':buildSrc:spotlessGroovyCheck' as it has no actions.
:buildSrc:spotlessGroovyCheck (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.0 secs.
:buildSrc:spotlessGroovyGradle (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:spotlessGroovyGradle
Caching disabled for task ':buildSrc:spotlessGroovyGradle': Caching has not been enabled for the task
Task ':buildSrc:spotlessGroovyGradle' is not up-to-date because:
  No history is available.
All input files are considered out-of-date for incremental task ':buildSrc:spotlessGroovyGradle'.
:buildSrc:spotlessGroovyGradle (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.068 secs.
:buildSrc:spotlessGroovyGradleCheck (Thread[Execution **** for ':buildSrc',5,main]) started.

> Task :buildSrc:spotlessGroovyGradleCheck
Skipping task ':buildSrc:spotlessGroovyGradleCheck' as it has no actions.
:buildSrc:spotlessGroovyGradleCheck (Thread[Execution **** for ':buildSrc',5,main]) completed. Took 0.0 secs.
:buildSrc:spotlessCheck (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:spotlessCheck
Skipping task ':buildSrc:spotlessCheck' as it has no actions.
:buildSrc:spotlessCheck (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.002 secs.
:buildSrc:pluginUnderTestMetadata (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:pluginUnderTestMetadata
Build cache key for task ':buildSrc:pluginUnderTestMetadata' is 9bc89b98d64429ba4b6af015e30ea3f3
Caching disabled for task ':buildSrc:pluginUnderTestMetadata': Caching has not been enabled for the task
Task ':buildSrc:pluginUnderTestMetadata' is not up-to-date because:
  No history is available.
:buildSrc:pluginUnderTestMetadata (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.342 secs.
:buildSrc:compileTestJava (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:compileTestJava NO-SOURCE
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/test/java',> not found
Skipping task ':buildSrc:compileTestJava' as it has no source files and no previous output files.
:buildSrc:compileTestJava (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.004 secs.
:buildSrc:compileTestGroovy (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:compileTestGroovy NO-SOURCE
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/test/groovy',> not found
Skipping task ':buildSrc:compileTestGroovy' as it has no source files and no previous output files.
:buildSrc:compileTestGroovy (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.003 secs.
:buildSrc:processTestResources (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:processTestResources NO-SOURCE
file or directory '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/buildSrc/src/test/resources',> not found
Skipping task ':buildSrc:processTestResources' as it has no source files and no previous output files.
:buildSrc:processTestResources (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.001 secs.
:buildSrc:testClasses (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:testClasses UP-TO-DATE
Skipping task ':buildSrc:testClasses' as it has no actions.
:buildSrc:testClasses (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.0 secs.
:buildSrc:test (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:test NO-SOURCE
Skipping task ':buildSrc:test' as it has no source files and no previous output files.
:buildSrc:test (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.005 secs.
:buildSrc:validateTaskProperties (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:validateTaskProperties FROM-CACHE
Build cache key for task ':buildSrc:validateTaskProperties' is 5029c8ccd1d2829c533bc834056f98eb
Task ':buildSrc:validateTaskProperties' is not up-to-date because:
  No history is available.
Origin for org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$TaskExecution@44553e92: {executionTime=819, hostName=apache-beam-jenkins-9, operatingSystem=Linux, buildInvocationId=5ds3kmrfmzeztozlvp56vufpty, creationTime=1591041582781, identity=:validateTaskProperties, type=org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.TaskExecution, userName=jenkins, gradleVersion=5.2.1, rootPath=/home/jenkins/jenkins-slave/workspace/beam_PreCommit_Python2_PVR_Flink_Commit/src/buildSrc}
Unpacked trees for task ':buildSrc:validateTaskProperties' from cache.
:buildSrc:validateTaskProperties (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.016 secs.
:buildSrc:check (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:check
Skipping task ':buildSrc:check' as it has no actions.
:buildSrc:check (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.0 secs.
:buildSrc:build (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) started.

> Task :buildSrc:build
Skipping task ':buildSrc:build' as it has no actions.
:buildSrc:build (Thread[Execution **** for ':buildSrc' Thread 4,5,main]) completed. Took 0.0 secs.
Settings evaluated using settings file '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/settings.gradle'.>
Using local directory build cache for the root build (location = /home/jenkins/.gradle/caches/build-cache-1, removeUnusedEntriesAfter = 7 days).
Projects loaded. Root project using build file '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/build.gradle'.>
Included projects: [root project 'beam', project ':beam-test-infra-metrics', project ':beam-test-tools', project ':examples', project ':model', project ':release', project ':runners', project ':sdks', project ':vendor', project ':website', project ':examples:java', project ':examples:kotlin', project ':model:fn-execution', project ':model:job-management', project ':model:pipeline', project ':runners:apex', project ':runners:core-construction-java', project ':runners:core-java', project ':runners:direct-java', project ':runners:extensions-java', project ':runners:flink', project ':runners:gearpump', project ':runners:google-cloud-dataflow-java', project ':runners:java-fn-execution', project ':runners:java-job-service', project ':runners:jet', project ':runners:local-java', project ':runners:portability', project ':runners:samza', project ':runners:spark', project ':sdks:go', project ':sdks:java', project ':sdks:python', project ':vendor:bytebuddy-1_10_8', project ':vendor:calcite-1_20_0', project ':vendor:grpc-1_26_0', project ':vendor:guava-26_0-jre', project ':vendor:sdks-java-extensions-protobuf', project ':runners:extensions-java:metrics', project ':runners:flink:1.10', project ':runners:flink:1.8', project ':runners:flink:1.9', project ':runners:google-cloud-dataflow-java:examples', project ':runners:google-cloud-dataflow-java:examples-streaming', project ':runners:google-cloud-dataflow-java:****', project ':runners:portability:java', project ':runners:samza:job-server', project ':runners:spark:job-server', project ':sdks:go:container', project ':sdks:go:examples', project ':sdks:go:test', project ':sdks:java:bom', project ':sdks:java:build-tools', project ':sdks:java:container', project ':sdks:java:core', project ':sdks:java:expansion-service', project ':sdks:java:extensions', project ':sdks:java:fn-execution', project ':sdks:java:harness', project ':sdks:java:io', project ':sdks:java:javadoc', project ':sdks:java:maven-archetypes', project 
':sdks:java:testing', project ':sdks:python:apache_beam', project ':sdks:python:container', project ':sdks:python:test-suites', project ':runners:flink:1.10:job-server', project ':runners:flink:1.10:job-server-container', project ':runners:flink:1.8:job-server', project ':runners:flink:1.8:job-server-container', project ':runners:flink:1.9:job-server', project ':runners:flink:1.9:job-server-container', project ':runners:google-cloud-dataflow-java:****:legacy-****', project ':runners:google-cloud-dataflow-java:****:windmill', project ':runners:spark:job-server:container', project ':sdks:java:extensions:euphoria', project ':sdks:java:extensions:google-cloud-platform-core', project ':sdks:java:extensions:jackson', project ':sdks:java:extensions:join-library', project ':sdks:java:extensions:kryo', project ':sdks:java:extensions:ml', project ':sdks:java:extensions:protobuf', project ':sdks:java:extensions:sketching', project ':sdks:java:extensions:sorter', project ':sdks:java:extensions:sql', project ':sdks:java:extensions:zetasketch', project ':sdks:java:io:amazon-web-services', project ':sdks:java:io:amazon-web-services2', project ':sdks:java:io:amqp', project ':sdks:java:io:bigquery-io-perf-tests', project ':sdks:java:io:cassandra', project ':sdks:java:io:clickhouse', project ':sdks:java:io:common', project ':sdks:java:io:elasticsearch', project ':sdks:java:io:elasticsearch-tests', project ':sdks:java:io:expansion-service', project ':sdks:java:io:file-based-io-tests', project ':sdks:java:io:google-cloud-platform', project ':sdks:java:io:hadoop-common', project ':sdks:java:io:hadoop-file-system', project ':sdks:java:io:hadoop-format', project ':sdks:java:io:hbase', project ':sdks:java:io:hcatalog', project ':sdks:java:io:jdbc', project ':sdks:java:io:jms', project ':sdks:java:io:kafka', project ':sdks:java:io:kinesis', project ':sdks:java:io:kudu', project ':sdks:java:io:mongodb', project ':sdks:java:io:mqtt', project ':sdks:java:io:parquet', project 
':sdks:java:io:rabbitmq', project ':sdks:java:io:redis', project ':sdks:java:io:snowflake', project ':sdks:java:io:solr', project ':sdks:java:io:synthetic', project ':sdks:java:io:thrift', project ':sdks:java:io:tika', project ':sdks:java:io:xml', project ':sdks:java:maven-archetypes:examples', project ':sdks:java:maven-archetypes:starter', project ':sdks:java:testing:expansion-service', project ':sdks:java:testing:load-tests', project ':sdks:java:testing:nexmark', project ':sdks:java:testing:test-utils', project ':sdks:python:apache_beam:testing', project ':sdks:python:container:py2', project ':sdks:python:container:py35', project ':sdks:python:container:py36', project ':sdks:python:container:py37', project ':sdks:python:container:py38', project ':sdks:python:test-suites:dataflow', project ':sdks:python:test-suites:direct', project ':sdks:python:test-suites:portable', project ':sdks:python:test-suites:tox', project ':sdks:java:extensions:sql:datacatalog', project ':sdks:java:extensions:sql:expansion-service', project ':sdks:java:extensions:sql:hcatalog', project ':sdks:java:extensions:sql:jdbc', project ':sdks:java:extensions:sql:perf-tests', project ':sdks:java:extensions:sql:shell', project ':sdks:java:extensions:sql:zetasql', project ':sdks:java:io:elasticsearch-tests:elasticsearch-tests-2', project ':sdks:java:io:elasticsearch-tests:elasticsearch-tests-5', project ':sdks:java:io:elasticsearch-tests:elasticsearch-tests-6', project ':sdks:java:io:elasticsearch-tests:elasticsearch-tests-7', project ':sdks:java:io:elasticsearch-tests:elasticsearch-tests-common', project ':sdks:python:apache_beam:testing:load_tests', project ':sdks:python:test-suites:dataflow:py2', project ':sdks:python:test-suites:dataflow:py35', project ':sdks:python:test-suites:dataflow:py36', project ':sdks:python:test-suites:dataflow:py37', project ':sdks:python:test-suites:direct:py2', project ':sdks:python:test-suites:direct:py35', project ':sdks:python:test-suites:direct:py36', project 
':sdks:python:test-suites:direct:py37', project ':sdks:python:test-suites:portable:py2', project ':sdks:python:test-suites:portable:py35', project ':sdks:python:test-suites:portable:py36', project ':sdks:python:test-suites:portable:py37', project ':sdks:python:test-suites:tox:py2', project ':sdks:python:test-suites:tox:py35', project ':sdks:python:test-suites:tox:py36', project ':sdks:python:test-suites:tox:py37', project ':sdks:python:test-suites:tox:py38', project ':sdks:python:test-suites:tox:pycommon']
Configuration on demand is an incubating feature.

> Configure project :
Evaluating root project 'beam' using build file '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/build.gradle'.>
Offline dependencies root configured at 'offline-repository'

> Configure project :runners:google-cloud-dataflow-java:worker
Evaluating project ':runners:google-cloud-dataflow-java:worker' using build file '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/runners/google-cloud-dataflow-java/worker/build.gradle'.>
Offline dependencies root configured at 'offline-repository'
Apply OSS Index Plugin

> Configure project :sdks:python
Evaluating project ':sdks:python' using build file '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/sdks/python/build.gradle'.>
Offline dependencies root configured at 'offline-repository'

> Configure project :sdks:python:test-suites:dataflow:py37
Evaluating project ':sdks:python:test-suites:dataflow:py37' using build file '<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/ws/src/sdks/python/test-suites/dataflow/py37/build.gradle'.>
Offline dependencies root configured at 'offline-repository'

FAILURE: Build failed with an exception.

* What went wrong:
Task 'runPerformanceTest' not found in project ':sdks:python:test-suites:dataflow:py37'.

* Try:
Run gradlew tasks to get a list of available tasks. Run with --stacktrace option to get the stack trace. Run with --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13s

Publishing build scan...
https://gradle.com/s/ehyzyup3i5ufa

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org