Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/05/31 06:11:16 UTC

Build failed in Jenkins: beam_PerformanceTests_Compressed_TextIOIT_HDFS #236

See <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/236/display/redirect?page=changes>

Changes:

[kenn] Remove Schema.TypeName.type()

[kenn] Remove unsafe builder methods from FieldType

------------------------------------------
[...truncated 371.30 KB...]
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.AssignWindowsParDoFnFactory$AssignWindowsParDoFn.processElement(AssignWindowsParDoFnFactory.java:118)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
    	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
    	at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:78)
    	at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    	at java.lang.Thread.run(Thread.java:745)
    org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-2018-05-31_06-05-13-0/ca09e082-ad1c-4320-bafe-e31297083327 could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
    	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1550)
    	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3110)
    	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3034)
    	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:723)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
    	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
    	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at javax.security.auth.Subject.doAs(Subject.java:422)
    	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

    	at org.apache.hadoop.ipc.Client.call(Client.java:1475)
    	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    	at com.sun.proxy.$Proxy64.addBlock(Unknown Source)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
    	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    	at com.sun.proxy.$Proxy65.addBlock(Unknown Source)
    	at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1455)
    	at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1251)
    	at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:448)
    	Suppressed: java.io.IOException: Failed closing channel to hdfs://35.193.26.156:9000/.temp-beam-2018-05-31_06-05-13-0/ca09e082-ad1c-4320-bafe-e31297083327
    		at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1004)
    		at org.apache.beam.sdk.io.WriteFiles.writeOrClose(WriteFiles.java:553)
    		at org.apache.beam.sdk.io.WriteFiles.access$1000(WriteFiles.java:112)
    		at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:521)
    		at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    		at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    		at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:138)
    		at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    		at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    		at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    		at com.google.cloud.dataflow.worker.AssignWindowsParDoFnFactory$AssignWindowsParDoFn.processElement(AssignWindowsParDoFnFactory.java:118)
    		at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    		at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    		at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
    		at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
    		at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
    		at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
    		at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
    		at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:78)
    		at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
    		at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
    		at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
    		at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
    		at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
    		at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
    		at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
    		at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
    		at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
    		at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
    		at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
    		at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
    		at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
    		at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
    		at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
    		at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    		at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    		at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    		at java.lang.Thread.run(Thread.java:745)
    	[CIRCULAR REFERENCE:org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-2018-05-31_06-05-13-0/ca09e082-ad1c-4320-bafe-e31297083327 could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
    	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1550)
    	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3110)
    	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3034)
    	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:723)
    	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
    	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
    	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at javax.security.auth.Subject.doAs(Subject.java:422)
    	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
    ]
    Workflow failed. Causes: S02:Generate sequence/Read(BoundedCountingSource)+Produce text lines+Write content to files/WriteFiles/RewindowIntoGlobal/Window.Assign+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write content to files/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
      textioit0writethenreadall-05302305-8sx4-harness-1q87,
      textioit0writethenreadall-05302305-8sx4-harness-1q87,
      textioit0writethenreadall-05302305-8sx4-harness-1q87,
      textioit0writethenreadall-05302305-8sx4-harness-1q87
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:348)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:329)
        at org.apache.beam.sdk.io.text.TextIOIT.writeThenReadAll(TextIOIT.java:114)

1 test completed, 1 failed
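
The failure above is environmental rather than a code regression: the HDFS NameNode accepted the file create but could not place the first block because no DataNodes were registered ("could only be replicated to 0 nodes instead of minReplication (=1)"), so all four Dataflow work-item attempts on that harness died the same way. A minimal liveness probe for this class of failure, offered only as a sketch -- it assumes an hdfs CLI on PATH, HADOOP_CONF_DIR pointing at the test cluster, and the "Live datanodes (N):" report format of Hadoop 2.6+:

    # Sketch only: count the DataNodes the NameNode considers live.
    # Assumes an `hdfs` CLI configured for the cluster under test.
    import subprocess

    def live_datanodes() -> int:
        report = subprocess.run(
            ["hdfs", "dfsadmin", "-report"],
            capture_output=True, text=True, check=True,
        ).stdout
        for line in report.splitlines():
            # Hadoop 2.6+ prints e.g. "Live datanodes (3):"
            if line.startswith("Live datanodes"):
                return int(line.split("(")[1].split(")")[0])
        return 0

    if __name__ == "__main__":
        n = live_datanodes()
        print("live datanodes:", n)
        # Writes need at least minReplication (=1) live DataNodes.
        assert n >= 1, "no DataNodes registered; HDFS block writes will fail"
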
Finished generating test XML results (0.025 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.032 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest>
:beam-sdks-java-io-file-based-io-tests:integrationTest (Thread[Task worker for ':' Thread 14,5,main]) completed. Took 4 mins 14.862 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 4m 25s
73 actionable tasks: 1 executed, 72 up-to-date

Publishing build scan...
https://gradle.com/s/ce4745avsqxqq


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-file-based-io-tests:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-file-based-io-tests:integrationTest'.
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
	at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
	at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
	at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
	at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
	at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
	at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>
	at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
	at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
	at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
	... 31 more


* Get more help at https://help.gradle.org

2018-05-31 06:09:26,089 1b04dcc1 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 667, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 547, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-31 06:09:26,090 1b04dcc1 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-31 06:09:26,091 1b04dcc1 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-filebasedioithdfs-236> delete -f <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/LargeITCluster/hdfs-multi-datanode-cluster.yml> --ignore-not-found
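
Note that cleanup still runs after the run phase fails: the kubectl delete above tears down the Kubernetes HDFS cluster before the run-phase exception is re-raised, which is why the same traceback appears a second time below. The control flow, sketched with hypothetical function names standing in for PerfKit's real phases:

    # Sketch only; the phase functions are hypothetical stand-ins for
    # PerfKit's real run and cleanup phases.
    def do_run_phase(spec):
        raise AssertionError("Integration Test Failed.")  # as above

    def do_cleanup_phase(spec):
        print("kubectl delete ... --ignore-not-found")    # teardown still runs

    def run_benchmark(spec):
        try:
            do_run_phase(spec)
        finally:
            do_cleanup_phase(spec)   # executes even though the run failed
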
2018-05-31 06:11:15,975 1b04dcc1 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 801, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 667, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 547, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
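
The AssertionError itself is PerfKit Benchmarker's pass/fail contract: the job is launched as a shell command, and any nonzero exit code is turned into this assertion (gcp_dpb_dataflow.py line 90 above) rather than being inspected further. A reduced sketch of that contract, with a hypothetical command in place of the real invocation:

    # Sketch of the retcode contract from the traceback above;
    # the command list is hypothetical, not PerfKit's actual invocation.
    import subprocess

    def submit_job(cmd):
        retcode = subprocess.call(cmd)
        # Mirrors gcp_dpb_dataflow.py:90 -- nonzero exit means failure.
        assert retcode == 0, "Integration Test Failed."

    submit_job(["./gradlew", ":beam-sdks-java-io-file-based-io-tests:integrationTest"])
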
2018-05-31 06:11:15,976 1b04dcc1 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-31 06:11:16,039 1b04dcc1 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-31 06:11:16,040 1b04dcc1 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/1b04dcc1/pkb.log>
2018-05-31 06:11:16,041 1b04dcc1 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/1b04dcc1/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : beam_PerformanceTests_Compressed_TextIOIT_HDFS #237

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/237/display/redirect?page=changes>