Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2023/10/16 03:26:21 UTC

Build failed in Jenkins: beam_PerformanceTests_Compressed_TextIOIT_HDFS #7364

See <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/7364/display/redirect>

Changes:


------------------------------------------
[...truncated 19.27 KB...]
+ [[ 0 == 0 ]]
+ [[ -n '' ]]
+ [[ 14 == \3\6 ]]
+ sleep 10
+ (( i++ ))
+ (( i <= max_retries ))
+ local output
++ eval 'kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7364> --namespace=beam-performancetests-compressed-textioit-hdfs-7364 get svc hadoop -ojsonpath='\''{.status.loadBalancer.ingress[0].ip}'\'''
+++ kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7364> --namespace=beam-performancetests-compressed-textioit-hdfs-7364 get svc hadoop '-ojsonpath={.status.loadBalancer.ingress[0].ip}'
+ output=
+ local status=0
+ [[ 0 == 0 ]]
+ [[ -n '' ]]
+ [[ 15 == \3\6 ]]
+ sleep 10
[...identical kubectl polls for attempts 16 through 35 omitted: each iteration runs the same get svc hadoop command, receives an empty ingress IP, and sleeps 10 seconds before incrementing the counter...]
+ (( i++ ))
+ (( i <= max_retries ))
+ local output
++ eval 'kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7364> --namespace=beam-performancetests-compressed-textioit-hdfs-7364 get svc hadoop -ojsonpath='\''{.status.loadBalancer.ingress[0].ip}'\'''
+++ kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7364> --namespace=beam-performancetests-compressed-textioit-hdfs-7364 get svc hadoop '-ojsonpath={.status.loadBalancer.ingress[0].ip}'
+ output=
+ local status=0
+ [[ 0 == 0 ]]
+ [[ -n '' ]]
+ [[ 36 == \3\6 ]]
+ echo 'Command failed after 36 retries'
Command failed after 36 retries
+ return 1
Build step 'Execute shell' marked build as failure
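
The trace above is the cluster-setup step polling kubectl get svc hadoop -ojsonpath='{.status.loadBalancer.ingress[0].ip}' once every 10 seconds for up to 36 attempts; the build fails because the LoadBalancer never reports an ingress IP. A minimal sketch of the retry helper implied by the trace follows (the function and variable names are assumptions for illustration, not the actual Beam test-infra script):

    #!/bin/bash
    # Hypothetical reconstruction of the retry loop visible in the xtrace output above.
    retry_for_output() {
      local command=$1
      local max_retries=36   # the trace compares the attempt counter against 36
      local sleep_time=10    # the trace sleeps 10 seconds between attempts
      local i
      for (( i = 1; i <= max_retries; i++ )); do
        local output
        output=$(eval "$command")
        local status=$?
        # Success requires exit code 0 AND non-empty output
        # (here: the ingress IP of the "hadoop" LoadBalancer service).
        if [[ $status == 0 && -n $output ]]; then
          echo "$output"
          return 0
        fi
        if [[ $i == $max_retries ]]; then
          echo "Command failed after $max_retries retries"
          return 1
        fi
        sleep "$sleep_time"
      done
    }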

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PerformanceTests_Compressed_TextIOIT_HDFS #7367

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/7367/display/redirect>


---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PerformanceTests_Compressed_TextIOIT_HDFS #7366

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/7366/display/redirect>

Changes:


------------------------------------------
[...truncated 572.17 KB...]
    		at org.apache.beam.runners.dataflow.worker.AssignWindowsParDoFnFactory$AssignWindowsParDoFn.processElement(AssignWindowsParDoFnFactory.java:115)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:54)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
    		at org.apache.beam.sdk.testutils.metrics.TimeMonitor.processElement(TimeMonitor.java:42)
    		at org.apache.beam.sdk.testutils.metrics.TimeMonitor$DoFnInvoker.invokeProcessElement(Unknown Source)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:54)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
    		at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:49)
    		at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:54)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:218)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:169)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:83)
    		at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:304)
    		at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:276)
    		at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:206)
    		at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:147)
    		at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:127)
    		at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    		at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    		at org.apache.beam.sdk.util.UnboundedScheduledExecutorService$ScheduledFutureTask.run(UnboundedScheduledExecutorService.java:163)
    		at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    		at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    		at java.lang.Thread.run(Thread.java:750)
    	Caused by: [CIRCULAR REFERENCE: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-37d1b203-92e9-44ba-b596-595f8f5cf997/f6766335608ae018-71d4-4f41-8c6d-fcb055fe86f2 could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
    	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
    	at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
    	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
    	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
    	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at javax.security.auth.Subject.doAs(Subject.java:422)
    	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    ]
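
The RemoteException above means the HDFS cluster stood up for this test had no live DataNodes when the Dataflow workers tried to write ("0 datanode(s) running"), so every block allocation was rejected. If the test namespace is still present, standard kubectl commands can show whether the datanode pods ever started; the namespace and pod names below are placeholders, not values taken from this log:

    # Placeholder names; substitute the namespace created for this run.
    NS=<test-namespace>
    kubectl --namespace=$NS get pods -o wide             # are the hadoop/datanode pods Running?
    kubectl --namespace=$NS describe pod <datanode-pod>  # scheduling and image-pull events
    kubectl --namespace=$NS logs <datanode-pod>          # datanode startup errors
    kubectl --namespace=$NS get svc hadoop               # was an external IP ever assigned?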

    Oct 17, 2023 3:25:14 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    SEVERE: 2023-10-17T03:25:12.803Z: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-37d1b203-92e9-44ba-b596-595f8f5cf997/7a66f5b44541c1e3-0062-44ae-b1c1-d9dd5d460911 could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
    	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
    	at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
    	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
    	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
    	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at javax.security.auth.Subject.doAs(Subject.java:422)
    	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)

    	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1506)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1403)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
    	at com.sun.proxy.$Proxy122.addBlock(Unknown Source)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
    	at com.sun.proxy.$Proxy123.addBlock(Unknown Source)
    	at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
    	at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
    	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
    	Suppressed: java.io.IOException: Failed closing channel to hdfs://35.232.73.130:9000/.temp-beam-37d1b203-92e9-44ba-b596-595f8f5cf997/7a66f5b44541c1e3-0062-44ae-b1c1-d9dd5d460911
    		at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1058)
    		at org.apache.beam.sdk.io.WriteFiles.writeOrClose(WriteFiles.java:650)
    		at org.apache.beam.sdk.io.WriteFiles.access$1100(WriteFiles.java:123)
    		at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.processElement(WriteFiles.java:618)
    		at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:185)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:54)
    		at org.apache.beam.runners.dataflow.worker.AssignWindowsParDoFnFactory$AssignWindowsParDoFn.processElement(AssignWindowsParDoFnFactory.java:115)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:54)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
    		at org.apache.beam.sdk.testutils.metrics.TimeMonitor.processElement(TimeMonitor.java:42)
    		at org.apache.beam.sdk.testutils.metrics.TimeMonitor$DoFnInvoker.invokeProcessElement(Unknown Source)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:54)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
    		at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:49)
    		at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
    		at org.apache.beam.runners.dataflow.worker.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
    		at org.apache.beam.runners.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:54)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:218)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:169)
    		at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:83)
    		at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:304)
    		at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:276)
    		at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:206)
    		at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:147)
    		at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:127)
    		at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    		at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    		at org.apache.beam.sdk.util.UnboundedScheduledExecutorService$ScheduledFutureTask.run(UnboundedScheduledExecutorService.java:163)
    		at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    		at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    		at java.lang.Thread.run(Thread.java:750)
    	Caused by: [CIRCULAR REFERENCE: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-37d1b203-92e9-44ba-b596-595f8f5cf997/7a66f5b44541c1e3-0062-44ae-b1c1-d9dd5d460911 could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
    	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
    	at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
    	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
    	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
    	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at javax.security.auth.Subject.doAs(Subject.java:422)
    	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    ]

    Oct 17, 2023 3:25:14 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2023-10-17T03:25:12.921Z: Finished operation Generate sequence/Read(BoundedCountingSource)+Produce text lines+Collect write start time+Write content to files/WriteFiles/RewindowIntoGlobal/Window.Assign+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Pair with random key+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Reshuffle/Window.Into()/Window.Assign+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Reshuffle/GroupByKey/Reify+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Reshuffle/GroupByKey/Write+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write
    Oct 17, 2023 3:25:14 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    SEVERE: 2023-10-17T03:25:13.020Z: Workflow failed. Causes: S03:Generate sequence/Read(BoundedCountingSource)+Produce text lines+Collect write start time+Write content to files/WriteFiles/RewindowIntoGlobal/Window.Assign+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Pair with random key+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Reshuffle/Window.Into()/Window.Assign+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Reshuffle/GroupByKey/Reify+Write content to files/WriteFiles/GatherTempFileResults/Consolidate/Reshuffle/GroupByKey/Write+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. If the logs only contain generic timeout errors related to accessing external resources, such as MongoDB, verify that the worker service account has permission to access the resource's subnetwork. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers: 

          Root cause: Work item failed.
          Worker ID: textioit0writethenreadall-10162022-cqif-harness-6g0j,

          Root cause: Work item failed.
          Worker ID: textioit0writethenreadall-10162022-cqif-harness-2twj,

          Root cause: Work item failed.
          Worker ID: textioit0writethenreadall-10162022-cqif-harness-rl58,

          Root cause: Work item failed.
          Worker ID: textioit0writethenreadall-10162022-cqif-harness-6g0j
    Oct 17, 2023 3:25:14 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2023-10-17T03:25:13.080Z: Cleaning up.
    Oct 17, 2023 3:25:14 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2023-10-17T03:25:13.162Z: Stopping worker pool...
    Oct 17, 2023 3:27:35 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2023-10-17T03:27:34.493Z: Autoscaling: Resized worker pool from 5 to 0.
    Oct 17, 2023 3:27:35 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
    INFO: 2023-10-17T03:27:34.532Z: Worker pool stopped.
    Oct 17, 2023 3:28:10 AM org.apache.beam.runners.dataflow.DataflowPipelineJob logTerminalState
    INFO: Job 2023-10-16_20_22_01-7141046284732352731 failed with status FAILED.
    Oct 17, 2023 3:28:11 AM org.apache.beam.sdk.testutils.NamedTestResult create
    WARNING: Reset invalid NamedTestResult value -1.697513071748E9 to -1.0.
    Oct 17, 2023 3:28:11 AM org.apache.beam.sdk.testutils.NamedTestResult create
    WARNING: Reset invalid NamedTestResult value -1.697513071748E9 to -1.0.
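
The Dataflow job ID is logged just above; when the Jenkins console is not enough, the worker-side logs for that job can usually be pulled with gcloud. The region and project below are placeholders, since neither appears in this excerpt:

    # Assumed invocation; fill in the region and project used by the test.
    gcloud dataflow jobs describe 2023-10-16_20_22_01-7141046284732352731 \
        --region=<region> --project=<project>
    gcloud logging read \
        'resource.type="dataflow_step" AND resource.labels.job_id="2023-10-16_20_22_01-7141046284732352731"' \
        --project=<project> --limit=50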

org.apache.beam.sdk.io.text.TextIOIT > writeThenReadAll STANDARD_OUT
    Load test results for test (ID): fb479616-8bb5-4710-a84d-d3fe4c4d937b and timestamp: 2023-10-17T03:28:10.898000000Z:
                     Metric:                    Value:
                dataset_size                 1.09784E9
                  write_time                      -1.0
                    run_time                      -1.0
                   read_time                       0.0

org.apache.beam.sdk.io.text.TextIOIT > writeThenReadAll STANDARD_ERROR
    ERROR StatusLogger Log4j2 could not find a logging implementation. Please add log4j-core to the classpath. Using SimpleLogger to log to the console...

Gradle Test Executor 1 finished executing tests.

> Task :sdks:java:io:file-based-io-tests:integrationTest FAILED

org.apache.beam.sdk.io.text.TextIOIT > writeThenReadAll FAILED
    java.lang.AssertionError: Values should be different. Actual: FAILED
        at org.junit.Assert.fail(Assert.java:89)
        at org.junit.Assert.failEquals(Assert.java:187)
        at org.junit.Assert.assertNotEquals(Assert.java:163)
        at org.junit.Assert.assertNotEquals(Assert.java:177)
        at org.apache.beam.sdk.io.text.TextIOIT.writeThenReadAll(TextIOIT.java:158)

1 test completed, 1 failed
Finished generating test XML results (0.038 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.042 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest>

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':sdks:java:io:file-based-io-tests:integrationTest'.
> There were failing tests. See the report at: file://<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>
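
For reference, the failing task can be rerun on its own once an HDFS endpoint is reachable; the sketch below shows only the task and test filter and omits the -D pipeline options the integration test expects, which are not visible in this excerpt:

    # Rough reproduction command; required pipeline/filesystem options are omitted.
    ./gradlew :sdks:java:io:file-based-io-tests:integrationTest \
        --tests "org.apache.beam.sdk.io.text.TextIOIT" --info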

Deprecated Gradle features were used in this build, making it incompatible with Gradle 9.0.

You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.

For more on this, please refer to https://docs.gradle.org/8.3/userguide/command_line_interface.html#sec:command_line_warnings in the Gradle documentation.
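
As the message notes, the individual deprecation warnings can be surfaced by rerunning the same task with the flag it names, for example:

    ./gradlew :sdks:java:io:file-based-io-tests:integrationTest --warning-mode all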

BUILD FAILED in 7m 1s
160 actionable tasks: 96 executed, 64 from cache

Publishing build scan...
https://ge.apache.org/s/mqwdxdpnaz4es

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Build failed in Jenkins: beam_PerformanceTests_Compressed_TextIOIT_HDFS #7365

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/7365/display/redirect?page=changes>

Changes:

[noreply] Bump github.com/aws/aws-sdk-go-v2/feature/s3/manager in /sdks (#29002)


------------------------------------------
[...truncated 19.29 KB...]
+ [[ 0 == 0 ]]
+ [[ -n '' ]]
+ [[ 14 == \3\6 ]]
+ sleep 10
+ (( i++ ))
+ (( i <= max_retries ))
+ local output
++ eval 'kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7365> --namespace=beam-performancetests-compressed-textioit-hdfs-7365 get svc hadoop -ojsonpath='\''{.status.loadBalancer.ingress[0].ip}'\'''
+++ kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7365> --namespace=beam-performancetests-compressed-textioit-hdfs-7365 get svc hadoop '-ojsonpath={.status.loadBalancer.ingress[0].ip}'
+ output=
+ local status=0
+ [[ 0 == 0 ]]
+ [[ -n '' ]]
+ [[ 15 == \3\6 ]]
+ sleep 10
[...identical kubectl polls for attempts 16 through 35 omitted: each iteration runs the same get svc hadoop command, receives an empty ingress IP, and sleeps 10 seconds before incrementing the counter...]
+ (( i++ ))
+ (( i <= max_retries ))
+ local output
++ eval 'kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7365> --namespace=beam-performancetests-compressed-textioit-hdfs-7365 get svc hadoop -ojsonpath='\''{.status.loadBalancer.ingress[0].ip}'\'''
+++ kubectl --kubeconfig=<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-beam-performancetests-compressed-textioit-hdfs-7365> --namespace=beam-performancetests-compressed-textioit-hdfs-7365 get svc hadoop '-ojsonpath={.status.loadBalancer.ingress[0].ip}'
+ output=
+ local status=0
+ [[ 0 == 0 ]]
+ [[ -n '' ]]
+ [[ 36 == \3\6 ]]
+ echo 'Command failed after 36 retries'
Command failed after 36 retries
+ return 1
Build step 'Execute shell' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org