Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2021/04/05 10:02:50 UTC

Build failed in Jenkins: beam_LoadTests_Go_Combine_Flink_Batch #131

See <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/131/display/redirect>

Changes:


------------------------------------------
[...truncated 114.74 KB...]
      spec: <
        urn: "beam:coder:length_prefix:v1"
      >
      component_coder_ids: "c3"
    >
  >
  coders: <
    key: "c5"
    value: <
      spec: <
        urn: "beam:coder:kv:v1"
      >
      component_coder_ids: "c0"
      component_coder_ids: "c4"
    >
  >
  coders: <
    key: "c6"
    value: <
      spec: <
        urn: "beam:go:coder:custom:v1"
        payload: "CgRqc29uEosCCBUaKAoLTnVtRWxlbWVudHMaAggCIhJqc29uOiJudW1fcmVjb3JkcyIyAQAaLwoNSW5pdGlhbFNwbGl0cxoCCAIiFWpzb246ImluaXRpYWxfc3BsaXRzIigIMgEBGiMKB0tleVNpemUaAggCIg9qc29uOiJrZXlfc2l6ZSIoEDIBAhonCglWYWx1ZVNpemUaAggCIhFqc29uOiJ2YWx1ZV9zaXplIigYMgEDGioKCk51bUhvdEtleXMaAggCIhNqc29uOiJudW1faG90X2tleXMiKCAyAQQaMgoOSG90S2V5RnJhY3Rpb24aAggOIhdqc29uOiJob3Rfa2V5X2ZyYWN0aW9uIigoMgEFGnkKX2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L2xvYWQvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS5qc29uRW5jEhYIFiIECBlADyoGCBQSAggIKgQIGUABIn8KX2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L2xvYWQvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS5qc29uRGVjEhwIFiIECBlAAyIGCBQSAggIKgQIGUAPKgQIGUAB"
      >
    >
  >
  coders: <
    key: "c7"
    value: <
      spec: <
        urn: "beam:coder:length_prefix:v1"
      >
      component_coder_ids: "c6"
    >
  >
  coders: <
    key: "c8"
    value: <
      spec: <
        urn: "beam:go:coder:custom:v1"
        payload: "ChdvZmZzZXRyYW5nZS5SZXN0cmljdGlvbhKAAQgaSnxnaXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vdGVzdC9sb2FkL3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vaW8vcnRyYWNrZXJzL29mZnNldHJhbmdlLlJlc3RyaWN0aW9uGpACCnhnaXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vdGVzdC9sb2FkL3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vaW8vcnRyYWNrZXJzL29mZnNldHJhbmdlLnJlc3RFbmMSkwEIFiKAAQgaSnxnaXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vdGVzdC9sb2FkL3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vaW8vcnRyYWNrZXJzL29mZnNldHJhbmdlLlJlc3RyaWN0aW9uKgYIFBICCAgqBAgZQAEikAIKeGdpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L2xvYWQvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS9pby9ydHJhY2tlcnMvb2Zmc2V0cmFuZ2UucmVzdERlYxKTAQgWIgYIFBICCAgqgAEIGkp8Z2l0aHViLmNvbS9hcGFjaGUvYmVhbS9zZGtzL2dvL3Rlc3QvbG9hZC92ZW5kb3IvZ2l0aHViLmNvbS9hcGFjaGUvYmVhbS9zZGtzL2dvL3BrZy9iZWFtL2lvL3J0cmFja2Vycy9vZmZzZXRyYW5nZS5SZXN0cmljdGlvbioECBlAAQ=="
      >
    >
  >
  coders: <
    key: "c9"
    value: <
      spec: <
        urn: "beam:coder:length_prefix:v1"
      >
      component_coder_ids: "c8"
    >
  >
  environments: <
    key: "go"
    value: <
      urn: "beam:env:docker:v1"
      payload: "\n>gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest"
      capabilities: "beam:protocol:progress_reporting:v0"
      capabilities: "beam:protocol:multi_core_bundle_processing:v1"
      capabilities: "beam:version:sdk_base:go"
      capabilities: "beam:coder:bytes:v1"
      capabilities: "beam:coder:bool:v1"
      capabilities: "beam:coder:varint:v1"
      capabilities: "beam:coder:double:v1"
      capabilities: "beam:coder:string_utf8:v1"
      capabilities: "beam:coder:length_prefix:v1"
      capabilities: "beam:coder:kv:v1"
      capabilities: "beam:coder:iterable:v1"
      capabilities: "beam:coder:state_backed_iterable:v1"
      capabilities: "beam:coder:windowed_value:v1"
      capabilities: "beam:coder:global_window:v1"
      capabilities: "beam:coder:interval_window:v1"
      dependencies: <
        type_urn: "beam:artifact:type:go_****_binary:v1"
        role_urn: "beam:artifact:role:staging_to:v1"
        role_payload: "\n\006****"
      >
    >
  >
>
root_transform_ids: "s1"
root_transform_ids: "e4"
root_transform_ids: "s6"
root_transform_ids: "e15"
root_transform_ids: "s4"
root_transform_ids: "e11"
root_transform_ids: "e12"
root_transform_ids: "e16"
root_transform_ids: "s2"
root_transform_ids: "e7"
root_transform_ids: "e8"
root_transform_ids: "s8"
root_transform_ids: "e19"
root_transform_ids: "e20"
requirements: "beam:requirement:pardo:splittable_dofn:v1"
2021/04/05 09:57:36 Prepared job with id: load-tests-go-flink-batch-combine-4-0405065303_80f48e76-25bf-4f1e-90fe-294ab89375ff and staging token: load-tests-go-flink-batch-combine-4-0405065303_80f48e76-25bf-4f1e-90fe-294ab89375ff
2021/04/05 09:57:36 Using specified **** binary: 'linux_amd64/combine'
2021/04/05 09:57:39 Staged binary artifact with token: 
2021/04/05 09:57:40 Submitted job: load0tests0go0flink0batch0combine0400405065303-root-0405095739-a20506da_ffc73673-41e0-4905-8947-a901e73bc684
2021/04/05 09:57:40 Job state: STOPPED
2021/04/05 09:57:40 Job state: STARTING
2021/04/05 09:57:40 Job state: RUNNING
2021/04/05 10:02:48  (): java.util.concurrent.ExecutionException: org.apache.flink.client.program.ProgramInvocationException: Job failed (JobID: 0241b134161be21e001bce355ff65380)
	at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
	at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1908)
	at org.apache.flink.api.java.ExecutionEnvironment.execute(ExecutionEnvironment.java:864)
	at org.apache.beam.runners.flink.FlinkBatchPortablePipelineTranslator$BatchTranslationContext.execute(FlinkBatchPortablePipelineTranslator.java:199)
	at org.apache.beam.runners.flink.FlinkPipelineRunner.runPipelineWithTranslator(FlinkPipelineRunner.java:118)
	at org.apache.beam.runners.flink.FlinkPipelineRunner.run(FlinkPipelineRunner.java:85)
	at org.apache.beam.runners.jobsubmission.JobInvocation.runPipeline(JobInvocation.java:86)
	at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:125)
	at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:57)
	at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:78)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.flink.client.program.ProgramInvocationException: Job failed (JobID: 0241b134161be21e001bce355ff65380)
	at org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:112)
	at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:616)
	at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:591)
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
	at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
	at org.apache.flink.client.program.rest.RestClusterClient.lambda$pollResourceAsync$21(RestClusterClient.java:565)
	at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
	at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
	at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
	at org.apache.flink.runtime.concurrent.FutureUtils.lambda$retryOperationWithDelay$8(FutureUtils.java:291)
	at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
	at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
	at java.util.concurrent.CompletableFuture.postFire(CompletableFuture.java:575)
	at java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:943)
	at java.util.concurrent.CompletableFuture$Completion.run(CompletableFuture.java:456)
	... 3 more
Caused by: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
	at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)
	at org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:110)
	... 19 more
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
	at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:110)
	at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:76)
	at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:192)
	at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:186)
	at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:180)
	at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:496)
	at org.apache.flink.runtime.scheduler.UpdateSchedulerNgOnInternalFailuresListener.notifyTaskFailure(UpdateSchedulerNgOnInternalFailuresListener.java:49)
	at org.apache.flink.runtime.executiongraph.ExecutionGraph.notifySchedulerNgAboutInternalTaskFailure(ExecutionGraph.java:1703)
	at org.apache.flink.runtime.executiongraph.Execution.processFail(Execution.java:1252)
	at org.apache.flink.runtime.executiongraph.Execution.processFail(Execution.java:1220)
	at org.apache.flink.runtime.executiongraph.Execution.markFailed(Execution.java:1051)
	at org.apache.flink.runtime.executiongraph.ExecutionVertex.markFailed(ExecutionVertex.java:748)
	at org.apache.flink.runtime.scheduler.DefaultExecutionVertexOperations.markFailed(DefaultExecutionVertexOperations.java:41)
	at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskDeploymentFailure(DefaultScheduler.java:446)
	at org.apache.flink.runtime.scheduler.DefaultScheduler.lambda$assignResourceOrHandleError$5(DefaultScheduler.java:433)
	at java.util.concurrent.CompletableFuture.uniHandle(CompletableFuture.java:836)
	at java.util.concurrent.CompletableFuture$UniHandle.tryFire(CompletableFuture.java:811)
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
	at java.util.concurrent.CompletableFuture.completeExceptionally(CompletableFuture.java:1990)
	at org.apache.flink.runtime.jobmaster.slotpool.SchedulerImpl.lambda$internalAllocateSlot$0(SchedulerImpl.java:168)
	at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
	at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
	at java.util.concurrent.CompletableFuture.completeExceptionally(CompletableFuture.java:1990)
	at org.apache.flink.runtime.jobmaster.slotpool.SlotSharingManager$SingleTaskSlot.release(SlotSharingManager.java:726)
	at org.apache.flink.runtime.jobmaster.slotpool.SlotSharingManager$MultiTaskSlot.release(SlotSharingManager.java:537)
	at org.apache.flink.runtime.jobmaster.slotpool.SlotSharingManager$MultiTaskSlot.lambda$new$0(SlotSharingManager.java:432)
	at java.util.concurrent.CompletableFuture.uniHandle(CompletableFuture.java:836)
	at java.util.concurrent.CompletableFuture$UniHandle.tryFire(CompletableFuture.java:811)
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
	at java.util.concurrent.CompletableFuture.completeExceptionally(CompletableFuture.java:1990)
	at org.apache.flink.runtime.concurrent.FutureUtils.lambda$forward$21(FutureUtils.java:1065)
	at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
	at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
	at java.util.concurrent.CompletableFuture.completeExceptionally(CompletableFuture.java:1990)
	at org.apache.flink.runtime.concurrent.FutureUtils$Timeout.run(FutureUtils.java:999)
	at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRunAsync(AkkaRpcActor.java:402)
	at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:195)
	at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
	at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:152)
	at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
	at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
	at scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123)
	at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
	at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170)
	at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
	at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
	at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
	at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
	at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
	at akka.actor.ActorCell.invoke(ActorCell.scala:561)
	at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
	at akka.dispatch.Mailbox.run(Mailbox.scala:225)
	at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
	at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
	at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
	at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
	at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: org.apache.flink.runtime.jobmanager.scheduler.NoResourceAvailableException: Could not allocate the required slot within slot request timeout. Please make sure that the cluster has enough resources.
	at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeWrapWithNoResourceAvailableException(DefaultScheduler.java:452)
	... 45 more
Caused by: java.util.concurrent.CompletionException: java.util.concurrent.TimeoutException
	at java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:292)
	at java.util.concurrent.CompletableFuture.completeThrowable(CompletableFuture.java:308)
	at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:607)
	at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:591)
	... 25 more
Caused by: java.util.concurrent.TimeoutException
	... 23 more
2021/04/05 10:02:48  (): java.util.concurrent.TimeoutException
2021/04/05 10:02:49 Job state: FAILED
2021/04/05 10:02:49 Failed to execute job: job load0tests0go0flink0batch0combine0400405065303-root-0405095739-a20506da_ffc73673-41e0-4905-8947-a901e73bc684 failed
panic: Failed to execute job: job load0tests0go0flink0batch0combine0400405065303-root-0405095739-a20506da_ffc73673-41e0-4905-8947-a901e73bc684 failed

goroutine 1 [running]:
github.com/apache/beam/sdks/go/test/load/vendor/github.com/apache/beam/sdks/go/pkg/beam/log.Fatalf(0x11b0540, 0xc00003e0b0, 0x106b826, 0x19, 0xc000239ee8, 0x1, 0x1)
	<https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src/sdks/go/test/load/.gogradle/project_gopath/src/github.com/apache/beam/sdks/go/test/load/vendor/github.com/apache/beam/sdks/go/pkg/beam/log/log.go>:153 +0xec
main.main()
	<https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src/sdks/go/test/load/.gogradle/project_gopath/src/github.com/apache/beam/sdks/go/test/load/combine/combine.go>:80 +0x452

> Task :sdks:go:test:load:run FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src/sdks/go/test/load/build.gradle'> line: 65

* What went wrong:
Execution failed for task ':sdks:go:test:load:run'.
> Process 'command 'sh'' finished with non-zero exit value 2

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 5m 25s
6 actionable tasks: 6 executed

Publishing build scan...
https://gradle.com/s/5uyvf3qom6gxc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_LoadTests_Go_Combine_Flink_Batch #136

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/136/display/redirect?page=changes>




Build failed in Jenkins: beam_LoadTests_Go_Combine_Flink_Batch #135

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/135/display/redirect?page=changes>

Changes:

[fabien.caylus] [BEAM-12012] Add API key & token authentication in ElasticsearchIO

[fabien.caylus] Simplify arguments checks

[Boyuan Zhang] Change PubSubSource and PubSubSink translation to avoid special

[Andrew Pilloud] Complex Type Passthrough Test

[Andrew Pilloud] Don't use base types in BeamCalcRel

[Kyle Weaver] [BEAM-10925] Refactor ZetaSqlJavaUdfTypeTest.

[Andrew Pilloud] Use correct schema geters, enforce types

[Boyuan Zhang] SDF bounded wrapper returns None when any exception happen in the

[Steve Niemitz] [BEAM-12126] Fix DirectRunner not respecting use_deprecated_reads

[randomstep] [BEAM-12092] Bump jedis to 3.5.2

[noreply] [BEAM-11227] Try reverting #14295: Moving from vendored gRPC 1.26 to

[noreply] Merge pull request #14446 from [BEAM-10854] Fix PeriodicImpulse for

[noreply] Turn on mpyp checks for filesystem (#14425)

[Andrew Pilloud] Rename functions, add comments

[noreply] [BEAM-12112] Disable streaming mode for PORTABILITY_BATCH (#14452)

[noreply] [BEAM-9547] Implementations for a few more DataFrame operations (#14362)

[heejong] [BEAM-12141] Print sha256 and size when downloading artifacts via

[noreply] [BEAM-12128] replace usage of snippets_test_py3.py to snippets_test.py


------------------------------------------
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-1 (beam) in workspace <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/>
The recommended git tool is: NONE
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git --version # 'git version 2.7.4'
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/* # timeout=10
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 974c2de8a45fbe6dae9cf3ad4b1d6a2327f0b9a3 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 974c2de8a45fbe6dae9cf3ad4b1d6a2327f0b9a3 # timeout=10
Commit message: "Merge pull request #14492 from ihji/BEAM-12141"
 > git rev-list --no-walk 572a99bab07e53e043887243e2b1e69120563be5 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1
SETUPTOOLS_USE_DISTUTILS=stdlib

[EnvInject] - Variables injected successfully.
[EnvInject] - Injecting environment variables from a build step.
[EnvInject] - Injecting as environment variables the properties content 
JOB_SERVER_IMAGE=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
CLUSTER_NAME=beam-loadtests-go-combine-flink-batch-135
DETACHED_MODE=true
HARNESS_IMAGES_TO_PULL=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
FLINK_NUM_WORKERS=5
FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz
GCS_BUCKET=gs://beam-flink-cluster
HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
FLINK_TASKMANAGER_SLOTS=1
ARTIFACTS_DIR=gs://beam-flink-cluster/beam-loadtests-go-combine-flink-batch-135
GCLOUD_ZONE=us-central1-a

[EnvInject] - Variables injected successfully.
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins794200582965880255.sh
+ echo Setting up flink cluster
Setting up flink cluster
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins1793963260170852462.sh
+ cd <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src/.test-infra/dataproc>
+ ./flink_cluster.sh create
+ GCLOUD_ZONE=us-central1-a
+ DATAPROC_VERSION=1.2
+ MASTER_NAME=beam-loadtests-go-combine-flink-batch-135-m
+ INIT_ACTIONS_FOLDER_NAME=init-actions
+ FLINK_INIT=gs://beam-flink-cluster/init-actions/flink.sh
+ BEAM_INIT=gs://beam-flink-cluster/init-actions/beam.sh
+ DOCKER_INIT=gs://beam-flink-cluster/init-actions/docker.sh
+ FLINK_LOCAL_PORT=8081
+ FLINK_TASKMANAGER_SLOTS=1
+ YARN_APPLICATION_MASTER=
+ create
+ upload_init_actions
+ echo 'Uploading initialization actions to GCS bucket: gs://beam-flink-cluster'
Uploading initialization actions to GCS bucket: gs://beam-flink-cluster
+ gsutil cp -r init-actions/beam.sh init-actions/docker.sh init-actions/flink.sh gs://beam-flink-cluster/init-actions
Copying file://init-actions/beam.sh [Content-Type=text/x-sh]...
Copying file://init-actions/docker.sh [Content-Type=text/x-sh]...
Copying file://init-actions/flink.sh [Content-Type=text/x-sh]...
/ [3 files][ 13.5 KiB/ 13.5 KiB]
Operation completed over 3 objects/13.5 KiB.                                     
+ create_cluster
+ local metadata=flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,
+ metadata+=flink-start-yarn-session=true,
+ metadata+=flink-taskmanager-slots=1,
+ metadata+=hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest ]]
+ metadata+=,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest ]]
+ metadata+=,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
+ local image_version=1.2
+ echo 'Starting dataproc cluster. Dataproc version: 1.2'
Starting dataproc cluster. Dataproc version: 1.2
+ local num_dataproc_****s=6
+ gcloud dataproc clusters create beam-loadtests-go-combine-flink-batch-135 --region=global --num-****s=6 --initialization-actions gs://beam-flink-cluster/init-actions/docker.sh,gs://beam-flink-cluster/init-actions/beam.sh,gs://beam-flink-cluster/init-actions/flink.sh --metadata flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,flink-start-yarn-session=true,flink-taskmanager-slots=1,hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest, --image-version=1.2 --zone=us-central1-a --quiet
Waiting on operation [projects/apache-beam-testing/regions/global/operations/97fa3ea6-10e1-3d03-a675-baa9946c9652].
Waiting for cluster creation operation...
WARNING: For PD-Standard without local SSDs, we strongly recommend provisioning 1TB or larger to ensure consistently high I/O performance. See https://cloud.google.com/compute/docs/disks/performance for information on disk I/O performance.
....done.
ERROR: (gcloud.dataproc.clusters.create) Operation [projects/apache-beam-testing/regions/global/operations/97fa3ea6-10e1-3d03-a675-baa9946c9652] failed: Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/4e2cb3a6-de2d-4b41-8a15-58f677a3acf3/beam-loadtests-go-combine-flink-batch-135-w-5/dataproc-initialization-script-2_output.
Build step 'Execute shell' marked build as failure



Build failed in Jenkins: beam_LoadTests_Go_Combine_Flink_Batch #134

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/134/display/redirect?page=changes>

Changes:

[noreply] Update WriteToBigQuery multiple destinations doc

[Ismaël Mejía] [BEAM-11948] Drop support for Flink 1.8 and 1.9

[randomstep] [BEAM-11900] Bump libthrift to 0.14.0

[randomstep] [BEAM-11900] Bump libthrift to 0.14.1

[Kyle Weaver] [BEAM-10925] Java UDF type tests for input refs.

[randomstep] [BEAM-12066] Bump classgraph to 4.8.104

[Kyle Weaver] [BEAM-12102] Catch and rethrow Calcite CannotPlanException.

[Kyle Weaver] [BEAM-12095] Add unit tests for path_to_beam_jar(artifact_id).

[kawaigin] [BEAM-10708] Read/Write Intermediate PCollections

[kawaigin] Fix lint

[kawaigin] Fix based on comments

[kawaigin] Added clear method to InMemoryCache because tests might be flaky when a

[noreply] [BEAM-11961] InfluxDBIOIT failing with unauthorized error (#14215)

[noreply] Add DataFrame API changes to CHANGES.md (#14454)

[noreply] Fix: Allow BigQuery tableIds with hyphens (#14125)

[noreply] Merge pull request #14394 from [BEAM-11277] Add method to check if two

[kawaigin] Avoid using interactive_environment module in the test because

[noreply] [BEAM-449] Support PCollectionList in PAssert (#14322)

[kawaigin] [BEAM-11045] Avoid broken deps

[kawaigin] Added back the setUp as additional cleanup routine before each test.

[noreply] [BEAM-11742] Use pyarrow schema instead column names when creating

[noreply] [BEAM-7372] remove usage of future package and unnecessary builtins

[noreply] [BEAM-7372] cleanup codes for py2 compatibility from


------------------------------------------
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-1 (beam) in workspace <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/>
The recommended git tool is: NONE
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git --version # 'git version 2.7.4'
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/* # timeout=10
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 572a99bab07e53e043887243e2b1e69120563be5 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 572a99bab07e53e043887243e2b1e69120563be5 # timeout=10
Commit message: "Merge pull request #14203: [BEAM-11948] Drop support for Flink 1.8 and 1.9"
 > git rev-list --no-walk 3216fcb25287448dca3e78a2fd48aee9ac6422a3 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1
SETUPTOOLS_USE_DISTUTILS=stdlib

[EnvInject] - Variables injected successfully.
[EnvInject] - Injecting environment variables from a build step.
[EnvInject] - Injecting as environment variables the properties content 
JOB_SERVER_IMAGE=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
CLUSTER_NAME=beam-loadtests-go-combine-flink-batch-134
DETACHED_MODE=true
HARNESS_IMAGES_TO_PULL=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
FLINK_NUM_WORKERS=5
FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz
GCS_BUCKET=gs://beam-flink-cluster
HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
FLINK_TASKMANAGER_SLOTS=1
ARTIFACTS_DIR=gs://beam-flink-cluster/beam-loadtests-go-combine-flink-batch-134
GCLOUD_ZONE=us-central1-a

[EnvInject] - Variables injected successfully.
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins2983638859365455649.sh
+ echo Setting up flink cluster
Setting up flink cluster
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins2880989016862136895.sh
+ cd <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src/.test-infra/dataproc>
+ ./flink_cluster.sh create
+ GCLOUD_ZONE=us-central1-a
+ DATAPROC_VERSION=1.2
+ MASTER_NAME=beam-loadtests-go-combine-flink-batch-134-m
+ INIT_ACTIONS_FOLDER_NAME=init-actions
+ FLINK_INIT=gs://beam-flink-cluster/init-actions/flink.sh
+ BEAM_INIT=gs://beam-flink-cluster/init-actions/beam.sh
+ DOCKER_INIT=gs://beam-flink-cluster/init-actions/docker.sh
+ FLINK_LOCAL_PORT=8081
+ FLINK_TASKMANAGER_SLOTS=1
+ YARN_APPLICATION_MASTER=
+ create
+ upload_init_actions
+ echo 'Uploading initialization actions to GCS bucket: gs://beam-flink-cluster'
Uploading initialization actions to GCS bucket: gs://beam-flink-cluster
+ gsutil cp -r init-actions/beam.sh init-actions/docker.sh init-actions/flink.sh gs://beam-flink-cluster/init-actions
Copying file://init-actions/beam.sh [Content-Type=text/x-sh]...
Copying file://init-actions/docker.sh [Content-Type=text/x-sh]...
Copying file://init-actions/flink.sh [Content-Type=text/x-sh]...
/ [3 files][ 13.5 KiB/ 13.5 KiB]
Operation completed over 3 objects/13.5 KiB.                                     
+ create_cluster
+ local metadata=flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,
+ metadata+=flink-start-yarn-session=true,
+ metadata+=flink-taskmanager-slots=1,
+ metadata+=hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest ]]
+ metadata+=,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest ]]
+ metadata+=,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
+ local image_version=1.2
+ echo 'Starting dataproc cluster. Dataproc version: 1.2'
Starting dataproc cluster. Dataproc version: 1.2
+ local num_dataproc_****s=6
+ gcloud dataproc clusters create beam-loadtests-go-combine-flink-batch-134 --region=global --num-****s=6 --initialization-actions gs://beam-flink-cluster/init-actions/docker.sh,gs://beam-flink-cluster/init-actions/beam.sh,gs://beam-flink-cluster/init-actions/flink.sh --metadata flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,flink-start-yarn-session=true,flink-taskmanager-slots=1,hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest, --image-version=1.2 --zone=us-central1-a --quiet
Waiting on operation [projects/apache-beam-testing/regions/global/operations/eeb9cb08-f5ca-328e-992b-b6cfb3a97a2d].
Waiting for cluster creation operation...
WARNING: For PD-Standard without local SSDs, we strongly recommend provisioning 1TB or larger to ensure consistently high I/O performance. See https://cloud.google.com/compute/docs/disks/performance for information on disk I/O performance.
....done.
ERROR: (gcloud.dataproc.clusters.create) Operation [projects/apache-beam-testing/regions/global/operations/eeb9cb08-f5ca-328e-992b-b6cfb3a97a2d] failed: Multiple Errors:
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/d9bcc558-b89c-4098-bbe7-d0276bff10a2/beam-loadtests-go-combine-flink-batch-134-m/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/d9bcc558-b89c-4098-bbe7-d0276bff10a2/beam-loadtests-go-combine-flink-batch-134-w-0/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/d9bcc558-b89c-4098-bbe7-d0276bff10a2/beam-loadtests-go-combine-flink-batch-134-w-1/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/d9bcc558-b89c-4098-bbe7-d0276bff10a2/beam-loadtests-go-combine-flink-batch-134-w-5/dataproc-initialization-script-2_output.
Build step 'Execute shell' marked build as failure



Build failed in Jenkins: beam_LoadTests_Go_Combine_Flink_Batch #133

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/133/display/redirect?page=changes>

Changes:

[heejong] [BEAM-12051] fix target/go-licenses: no such file or directory

[MATTHEW.Ouyang] [BEAM-12059] include literal T in DATETIME format

[Kyle Weaver] [BEAM-11483] Ignore windowed GBK tests in Spark portable streaming.

[suztomo] protobuf-java to be in-line with libraries-bom 16.3.0

[suztomo] Removing unused variable google_auth_version

[Kyle Weaver] [BEAM-12095] Fix Spark job server in uber jar path as well.

[kawaigin] [BEAM-12096] Attempt to fix flaky test

[kawaigin] Added logging of potential ImportError

[kawaigin] Use PropertyMock to replace the global singleton current_env()

[noreply] [BEAM-7372] Remove dead py<3.6 paths (#14436)

[kawaigin] Changed warning logs about not in REPL env to error level and fixed a

[noreply] [BEAM-9547] Raise WontImplementError for a few more operations (#14330)

[noreply] [BEAM-11544] BQML pattern (#13644)

[noreply] [BEAM-11574] Enable cross-language integration tests on Dataflow

[noreply] [BEAM-11585] Select.flattenedSchema doesn't flatten nested array fields

[noreply] Updating Go tests on PR template. (#14442)

[noreply] [BEAM-7372] cleanup codes for py2 compatibility from

[noreply] Merge pull request #14388 from [BEAM-7372] remove codes for py2

[noreply] Merge pull request #14365 from [BEAM-10884] - Adding tests to


------------------------------------------
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-1 (beam) in workspace <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/>
The recommended git tool is: NONE
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git --version # 'git version 2.7.4'
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/* # timeout=10
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 3216fcb25287448dca3e78a2fd48aee9ac6422a3 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 3216fcb25287448dca3e78a2fd48aee9ac6422a3 # timeout=10
Commit message: "Merge pull request #14412 from [BEAM-8696] protobuf-java 3.14.0 in line with libraries BOM 16.3.0"
 > git rev-list --no-walk bcced0cf3202829eed2152a8eeafaa0e159645e6 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1
SETUPTOOLS_USE_DISTUTILS=stdlib

[EnvInject] - Variables injected successfully.
[EnvInject] - Injecting environment variables from a build step.
[EnvInject] - Injecting as environment variables the properties content 
JOB_SERVER_IMAGE=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
CLUSTER_NAME=beam-loadtests-go-combine-flink-batch-133
DETACHED_MODE=true
HARNESS_IMAGES_TO_PULL=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
FLINK_NUM_WORKERS=5
FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz
GCS_BUCKET=gs://beam-flink-cluster
HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
FLINK_TASKMANAGER_SLOTS=1
ARTIFACTS_DIR=gs://beam-flink-cluster/beam-loadtests-go-combine-flink-batch-133
GCLOUD_ZONE=us-central1-a

[EnvInject] - Variables injected successfully.
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins632093707593018239.sh
+ echo Setting up flink cluster
Setting up flink cluster
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins237918862635725594.sh
+ cd <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src/.test-infra/dataproc>
+ ./flink_cluster.sh create
+ GCLOUD_ZONE=us-central1-a
+ DATAPROC_VERSION=1.2
+ MASTER_NAME=beam-loadtests-go-combine-flink-batch-133-m
+ INIT_ACTIONS_FOLDER_NAME=init-actions
+ FLINK_INIT=gs://beam-flink-cluster/init-actions/flink.sh
+ BEAM_INIT=gs://beam-flink-cluster/init-actions/beam.sh
+ DOCKER_INIT=gs://beam-flink-cluster/init-actions/docker.sh
+ FLINK_LOCAL_PORT=8081
+ FLINK_TASKMANAGER_SLOTS=1
+ YARN_APPLICATION_MASTER=
+ create
+ upload_init_actions
+ echo 'Uploading initialization actions to GCS bucket: gs://beam-flink-cluster'
Uploading initialization actions to GCS bucket: gs://beam-flink-cluster
+ gsutil cp -r init-actions/beam.sh init-actions/docker.sh init-actions/flink.sh gs://beam-flink-cluster/init-actions
Copying file://init-actions/beam.sh [Content-Type=text/x-sh]...
Copying file://init-actions/docker.sh [Content-Type=text/x-sh]...
Copying file://init-actions/flink.sh [Content-Type=text/x-sh]...
/ [3 files][ 13.5 KiB/ 13.5 KiB]
Operation completed over 3 objects/13.5 KiB.                                     
+ create_cluster
+ local metadata=flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,
+ metadata+=flink-start-yarn-session=true,
+ metadata+=flink-taskmanager-slots=1,
+ metadata+=hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest ]]
+ metadata+=,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest ]]
+ metadata+=,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
+ local image_version=1.2
+ echo 'Starting dataproc cluster. Dataproc version: 1.2'
Starting dataproc cluster. Dataproc version: 1.2
+ local num_dataproc_****s=6
+ gcloud dataproc clusters create beam-loadtests-go-combine-flink-batch-133 --region=global --num-****s=6 --initialization-actions gs://beam-flink-cluster/init-actions/docker.sh,gs://beam-flink-cluster/init-actions/beam.sh,gs://beam-flink-cluster/init-actions/flink.sh --metadata flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,flink-start-yarn-session=true,flink-taskmanager-slots=1,hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest, --image-version=1.2 --zone=us-central1-a --quiet
Waiting on operation [projects/apache-beam-testing/regions/global/operations/8b11d5d7-2120-3849-ad49-16d01c827b00].
Waiting for cluster creation operation...
WARNING: For PD-Standard without local SSDs, we strongly recommend provisioning 1TB or larger to ensure consistently high I/O performance. See https://cloud.google.com/compute/docs/disks/performance for information on disk I/O performance.
....done.
ERROR: (gcloud.dataproc.clusters.create) Operation [projects/apache-beam-testing/regions/global/operations/8b11d5d7-2120-3849-ad49-16d01c827b00] failed: Multiple Errors:
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/5b1a5cf7-69db-4082-b7ed-6a2c7cb8fc9e/beam-loadtests-go-combine-flink-batch-133-m/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/5b1a5cf7-69db-4082-b7ed-6a2c7cb8fc9e/beam-loadtests-go-combine-flink-batch-133-w-0/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/5b1a5cf7-69db-4082-b7ed-6a2c7cb8fc9e/beam-loadtests-go-combine-flink-batch-133-w-1/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/5b1a5cf7-69db-4082-b7ed-6a2c7cb8fc9e/beam-loadtests-go-combine-flink-batch-133-w-5/dataproc-initialization-script-2_output.
Build step 'Execute shell' marked build as failure



Build failed in Jenkins: beam_LoadTests_Go_Combine_Flink_Batch #132

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/132/display/redirect?page=changes>

Changes:

[chamikaramj] Updates Dataflow worker pool config to include all environments used by

[Kyle Weaver] [BEAM-10925] Roundtrip tests for literals through Java UDF.

[Ismaël Mejía] [BEAM-4106] Add FileStagingOptions and merge staging file options

[randomstep] [BEAM-12067] Bump elasticsearch-rest-high-level-client to 7.12.0

[Kyle Weaver] [BEAM-12095] Fix Spark job server path.

[noreply] [BEAM-9615] Misc final schema cleanups. (#14285)

[noreply] [BEAM-12083] Nexmark Query 13. (#14404)

[noreply] remove typo in encoding.go

[Kyle Weaver] [BEAM-10925] Simplify test setup.

[kileysok] Optimize reservoir sampling calculation

[noreply] [BEAM-12060] Remove overwriting jenkins property. (#14432)


------------------------------------------
Started by timer
Running as SYSTEM
[EnvInject] - Loading node environment variables.
Building remotely on apache-beam-jenkins-1 (beam) in workspace <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/>
The recommended git tool is: NONE
No credentials specified
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://github.com/apache/beam.git
 > git init <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src> # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git --version # timeout=10
 > git --version # 'git version 2.7.4'
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
 > git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/* # timeout=10
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision bcced0cf3202829eed2152a8eeafaa0e159645e6 (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f bcced0cf3202829eed2152a8eeafaa0e159645e6 # timeout=10
Commit message: "Merge pull request #14406 from [BEAM-11836] Optimize reservoir sampling calculation in PCollectionConsumerRegistry"
 > git rev-list --no-walk bee495f335fefd6ec2a4d5b0ef355b6011c39bcd # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content 
SPARK_LOCAL_IP=127.0.0.1
SETUPTOOLS_USE_DISTUTILS=stdlib

[EnvInject] - Variables injected successfully.
[EnvInject] - Injecting environment variables from a build step.
[EnvInject] - Injecting as environment variables the properties content 
JOB_SERVER_IMAGE=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
CLUSTER_NAME=beam-loadtests-go-combine-flink-batch-132
DETACHED_MODE=true
HARNESS_IMAGES_TO_PULL=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
FLINK_NUM_WORKERS=5
FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz
GCS_BUCKET=gs://beam-flink-cluster
HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
FLINK_TASKMANAGER_SLOTS=1
ARTIFACTS_DIR=gs://beam-flink-cluster/beam-loadtests-go-combine-flink-batch-132
GCLOUD_ZONE=us-central1-a

[EnvInject] - Variables injected successfully.
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins3334274521335447328.sh
+ echo Setting up flink cluster
Setting up flink cluster
[beam_LoadTests_Go_Combine_Flink_Batch] $ /bin/bash -xe /tmp/jenkins4970668652433447732.sh
+ cd <https://ci-beam.apache.org/job/beam_LoadTests_Go_Combine_Flink_Batch/ws/src/.test-infra/dataproc>
+ ./flink_cluster.sh create
+ GCLOUD_ZONE=us-central1-a
+ DATAPROC_VERSION=1.2
+ MASTER_NAME=beam-loadtests-go-combine-flink-batch-132-m
+ INIT_ACTIONS_FOLDER_NAME=init-actions
+ FLINK_INIT=gs://beam-flink-cluster/init-actions/flink.sh
+ BEAM_INIT=gs://beam-flink-cluster/init-actions/beam.sh
+ DOCKER_INIT=gs://beam-flink-cluster/init-actions/docker.sh
+ FLINK_LOCAL_PORT=8081
+ FLINK_TASKMANAGER_SLOTS=1
+ YARN_APPLICATION_MASTER=
+ create
+ upload_init_actions
+ echo 'Uploading initialization actions to GCS bucket: gs://beam-flink-cluster'
Uploading initialization actions to GCS bucket: gs://beam-flink-cluster
+ gsutil cp -r init-actions/beam.sh init-actions/docker.sh init-actions/flink.sh gs://beam-flink-cluster/init-actions
Copying file://init-actions/beam.sh [Content-Type=text/x-sh]...
Copying file://init-actions/docker.sh [Content-Type=text/x-sh]...
Copying file://init-actions/flink.sh [Content-Type=text/x-sh]...
/ [3 files][ 13.5 KiB/ 13.5 KiB]
Operation completed over 3 objects/13.5 KiB.                                     
+ create_cluster
+ local metadata=flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,
+ metadata+=flink-start-yarn-session=true,
+ metadata+=flink-taskmanager-slots=1,
+ metadata+=hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest ]]
+ metadata+=,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest ]]
+ metadata+=,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest
+ local image_version=1.2
+ echo 'Starting dataproc cluster. Dataproc version: 1.2'
Starting dataproc cluster. Dataproc version: 1.2
+ local num_dataproc_****s=6
+ gcloud dataproc clusters create beam-loadtests-go-combine-flink-batch-132 --region=global --num-****s=6 --initialization-actions gs://beam-flink-cluster/init-actions/docker.sh,gs://beam-flink-cluster/init-actions/beam.sh,gs://beam-flink-cluster/init-actions/flink.sh --metadata flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,flink-start-yarn-session=true,flink-taskmanager-slots=1,hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_go_sdk:latest,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:latest, --image-version=1.2 --zone=us-central1-a --quiet
Waiting on operation [projects/apache-beam-testing/regions/global/operations/77d4cadd-f251-3638-8fdf-4ede5368c433].
Waiting for cluster creation operation...
WARNING: For PD-Standard without local SSDs, we strongly recommend provisioning 1TB or larger to ensure consistently high I/O performance. See https://cloud.google.com/compute/docs/disks/performance for information on disk I/O performance.
....done.
ERROR: (gcloud.dataproc.clusters.create) Operation [projects/apache-beam-testing/regions/global/operations/77d4cadd-f251-3638-8fdf-4ede5368c433] failed: Multiple Errors:
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/c1a7abba-18b2-4193-9a34-6069024e8778/beam-loadtests-go-combine-flink-batch-132-m/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/c1a7abba-18b2-4193-9a34-6069024e8778/beam-loadtests-go-combine-flink-batch-132-w-0/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/c1a7abba-18b2-4193-9a34-6069024e8778/beam-loadtests-go-combine-flink-batch-132-w-1/dataproc-initialization-script-2_output
 - Initialization action timed out. Failed action 'gs://beam-flink-cluster/init-actions/flink.sh', see output in: gs://dataproc-6c5fbcbb-a2de-406e-9cf7-8c1ce0b6a604-us/google-cloud-dataproc-metainfo/c1a7abba-18b2-4193-9a34-6069024e8778/beam-loadtests-go-combine-flink-batch-132-w-5/dataproc-initialization-script-2_output.
Build step 'Execute shell' marked build as failure
