Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/04/13 18:14:50 UTC

Build failed in Jenkins: beam_PerformanceTests_XmlIOIT_HDFS #44

See <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/44/display/redirect?page=changes>

Changes:

[ehudm] Add lint checks for modules under sdks/python/.

[kedin] Add Row Json Deserializer

[kedin] Add RowJsonValueExtractors

[aaltay] [BEAM-4028] Adding NameContext to Python SDK. (#5043)

------------------------------------------
[...truncated 181.54 KB...]
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
	at com.sun.proxy.$Proxy61.create(Unknown Source)
	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1623)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1703)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1638)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:248)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:235)
	at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:923)
	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:495)
	at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:614)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:712)
	at org.apache.hadoop.ipc.Client$Connection.access$2900(Client.java:375)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1528)
	at org.apache.hadoop.ipc.Client.call(Client.java:1451)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy60.create(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
	at com.sun.proxy.$Proxy61.create(Unknown Source)
	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1623)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1703)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1638)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:248)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:235)
	at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:923)
	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn$DoFnInvoker.invokeProcessElement(Unknown Source)
	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:138)
	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
	at com.google.cloud.dataflow.worker.SimpleParDoFn$1.output(SimpleParDoFn.java:271)
	at org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:211)
	at org.apache.beam.runners.core.SimpleDoFnRunner.access$700(SimpleDoFnRunner.java:66)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:436)
	at org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:424)
	at org.apache.beam.sdk.transforms.MapElements$1.processElement(MapElements.java:129)
	at org.apache.beam.sdk.transforms.MapElements$1$DoFnInvoker.invokeProcessElement(Unknown Source)
	at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
	at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
	at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
	at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
	at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:189)
	at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:150)
	at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:74)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:381)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:353)
	at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:284)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
	at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Workflow failed. Causes: S03:Generate sequence/Read(BoundedCountingSource)+Create xml records/Map+Write xml files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write xml files/WriteFiles/GatherTempFileResults/Add void key/AddKeys/Map+Write xml files/WriteFiles/GatherTempFileResults/Reshuffle/Window.Into()/Window.Assign+Write xml files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Reify+Write xml files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Write+Write xml files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write xml files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
  xmlioit0writethenreadall--04131109-89iw-harness-g4f3,
  xmlioit0writethenreadall--04131109-89iw-harness-rddz,
  xmlioit0writethenreadall--04131109-89iw-harness-z237,
  xmlioit0writethenreadall--04131109-89iw-harness-x6r1
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.xml.XmlIOIT.writeThenReadAll(XmlIOIT.java:131)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
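
Note on the failure above: the root cause of run #44 is the java.net.ConnectException — HadoopFileSystem.create could not open an RPC connection to the HDFS namenode, so every temp-file write in WriteFiles failed and the work item exhausted its four attempts. Purely as a hedged illustration (not part of the Beam test suite, and the host/port below are placeholders rather than values from this job), a minimal standalone probe with the plain Hadoop client exercises the same namenode RPC path:

    // Hypothetical connectivity probe: checks whether the HDFS namenode
    // configured for the IT answers a metadata RPC at all.
    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HdfsProbe {
      public static void main(String[] args) throws Exception {
        // Placeholder address; the real endpoint comes from the Kubernetes HDFS
        // cluster brought up from hdfs-single-datanode-cluster.yml.
        URI namenode = URI.create("hdfs://namenode-host:8020");
        try (FileSystem fs = FileSystem.get(namenode, new Configuration())) {
          // exists("/") forces a namenode RPC; a refused connection here
          // reproduces the ConnectException seen in the stack trace above.
          System.out.println("HDFS reachable, root exists: " + fs.exists(new Path("/")));
        }
      }
    }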

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR]   XmlIOIT.writeThenReadAll:131  Runtime java.net.ConnectException: Call From xm...
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-file-based-io-tests ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-file-based-io-tests ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/2ae39f5e/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 05:46 min
[INFO] Finished at: 2018-04-13T18:14:46Z
[INFO] Final Memory: 106M/1424M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-file-based-io-tests: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/2ae39f5e/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-file-based-io-tests: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/2ae39f5e/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/2ae39f5e/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-04-13 18:14:47,268 2ae39f5e MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 623, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-13 18:14:47,318 2ae39f5e MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-04-13 18:14:47,318 2ae39f5e MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/config-filebasedioithdfs-1523631699774> delete -f <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/SmallITCluster/hdfs-single-datanode-cluster.yml>
2018-04-13 18:14:49,336 2ae39f5e MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/config-filebasedioithdfs-1523631699774> delete -f <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/SmallITCluster/hdfs-single-datanode-cluster-for-local-dev.yml>
2018-04-13 18:14:49,533 2ae39f5e MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 757, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 623, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-13 18:14:49,533 2ae39f5e MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-04-13 18:14:49,557 2ae39f5e MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-04-13 18:14:49,558 2ae39f5e MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/2ae39f5e/pkb.log>
2018-04-13 18:14:49,558 2ae39f5e MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/2ae39f5e/completion_statuses.json>
Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : beam_PerformanceTests_XmlIOIT_HDFS #46

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/46/display/redirect?page=changes>


Build failed in Jenkins: beam_PerformanceTests_XmlIOIT_HDFS #45

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/45/display/redirect?page=changes>

Changes:

[sidhom] Support impulse transforms in Flink

[sidhom] Add Impulse ValidatesRunner test

[tgroh] Fix materializesWithDifferentEnvConsumer

[tgroh] Reduce Requirements to be considered a Primitve

[sidhom] [BEAM-3994] Use typed client pool sinks and sources

[sidhom] [BEAM-3966] Move functional utilities into shared module

[sidhom] Use general functional interfaces in ControlClientPool

[sidhom] Rename createLinked() to createBuffered() in QueueControlClientPool

[github] Add region argument to dataflow.go

[github] Region isn't on proto; create and get instead.

[tgroh] Rename `defaultRegistry` to `javaSdkNativeRegistry`

------------------------------------------
[...truncated 74.23 KB...]
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2447)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2335)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:623)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:397)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

	at org.apache.hadoop.ipc.Client.call(Client.java:1475)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy60.create(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
	at com.sun.proxy.$Proxy61.create(Unknown Source)
	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1623)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1703)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1638)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:248)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:235)
	at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:923)
	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException): Cannot create file/TEXTIO_IT__1523667505656/.temp-beam-2018-04-14_00-58-25-0/9d1d4c3c-ed72-49b8-9aad-4c4dca14be94. Name node is in safe mode.
The reported blocks 31 has reached the threshold 0.9990 of total blocks 31. The number of live datanodes 1 has reached the minimum number 0. In safe mode extension. Safe mode will be turned off automatically in 8 seconds.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkNameNodeSafeMode(FSNamesystem.java:1327)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2447)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2335)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:623)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:397)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

	at org.apache.hadoop.ipc.Client.call(Client.java:1475)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy60.create(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
	at com.sun.proxy.$Proxy61.create(Unknown Source)
	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1623)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1703)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1638)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
	at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
	at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:248)
	at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:235)
	at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:923)
	at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
Workflow failed. Causes: S03:Generate sequence/Read(BoundedCountingSource)+Create xml records/Map+Write xml files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write xml files/WriteFiles/GatherTempFileResults/Add void key/AddKeys/Map+Write xml files/WriteFiles/GatherTempFileResults/Reshuffle/Window.Into()/Window.Assign+Write xml files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Reify+Write xml files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Write+Write xml files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write xml files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: 
  xmlioit0writethenreadall--04131758-kw7q-harness-d52l,
  xmlioit0writethenreadall--04131758-kw7q-harness-t2pg,
  xmlioit0writethenreadall--04131758-kw7q-harness-2hfd,
  xmlioit0writethenreadall--04131758-kw7q-harness-89db
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
	at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
	at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
	at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
	at org.apache.beam.sdk.io.xml.XmlIOIT.writeThenReadAll(XmlIOIT.java:131)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:317)
	at org.junit.rules.RunRules.evaluate(RunRules.java:20)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
	at org.apache.maven.surefire.junitcore.pc.Scheduler$1.run(Scheduler.java:410)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
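
Note on the failure above: run #45 fails for a different reason — the writes arrive while the freshly started single-datanode namenode is still in safe mode, so every create() is rejected until the block-report threshold is met (the message itself says safe mode will lift automatically within seconds). As a hedged sketch only, and not how the Beam IT actually handles this, a client-side retry that waits out the safe-mode window could look roughly like:

    // Sketch under stated assumptions: retries create() while the namenode
    // reports safe mode. Retry count and back-off are arbitrary choices.
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.ipc.RemoteException;

    final class SafeModeAwareCreate {
      static FSDataOutputStream createWithRetry(FileSystem fs, Path path) throws Exception {
        for (int attempt = 0; attempt < 10; attempt++) {
          try {
            return fs.create(path);
          } catch (RemoteException e) {
            // The server-side exception class name travels inside the RemoteException.
            if (!e.getClassName().endsWith("SafeModeException")) {
              throw e;
            }
            Thread.sleep(5_000L); // namenode still in safe mode; back off and retry
          }
        }
        throw new IllegalStateException("Namenode stayed in safe mode too long: " + path);
      }
    }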

[INFO] 
[INFO] Results:
[INFO] 
[ERROR] Errors: 
[ERROR]   XmlIOIT.writeThenReadAll:131  Runtime org.apache.hadoop.ipc.RemoteException(o...
[INFO] 
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-dependency-plugin:3.0.2:analyze-only (default) @ beam-sdks-java-io-file-based-io-tests ---
[INFO] No dependency problems found
[INFO] 
[INFO] --- maven-failsafe-plugin:2.21.0:verify (default) @ beam-sdks-java-io-file-based-io-tests ---
[INFO] Failsafe report directory: <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/9d34c700/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports>
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 03:51 min
[INFO] Finished at: 2018-04-14T01:02:01Z
[INFO] Final Memory: 106M/1415M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-file-based-io-tests: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/9d34c700/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports> for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
[ERROR] -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.apache.maven.plugins:maven-failsafe-plugin:2.21.0:verify (default) on project beam-sdks-java-io-file-based-io-tests: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/9d34c700/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoFailureException: There are test failures.

Please refer to <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/9d34c700/beam/sdks/java/io/file-based-io-tests/target/failsafe-reports> for the individual test results.
Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
    at org.apache.maven.plugin.surefire.SurefireHelper.throwException (SurefireHelper.java:240)
    at org.apache.maven.plugin.surefire.SurefireHelper.reportExecution (SurefireHelper.java:112)
    at org.apache.maven.plugin.failsafe.VerifyMojo.execute (VerifyMojo.java:192)
    at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
    at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
    at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
    at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
    at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
    at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
    at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
    at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
    at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
    at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
    at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke (Method.java:498)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
    at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
    at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
    at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR] 
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException

STDERR: 
2018-04-14 01:02:01,738 9d34c700 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 623, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-14 01:02:01,739 9d34c700 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-04-14 01:02:01,740 9d34c700 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/config-filebasedioithdfs-1523651176931> delete -f <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/SmallITCluster/hdfs-single-datanode-cluster.yml>
2018-04-14 01:02:02,344 9d34c700 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/config-filebasedioithdfs-1523651176931> delete -f <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/SmallITCluster/hdfs-single-datanode-cluster-for-local-dev.yml>
2018-04-14 01:02:02,540 9d34c700 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 757, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 623, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",> line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",> line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",> line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-04-14 01:02:02,541 9d34c700 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-04-14 01:02:02,576 9d34c700 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED                  
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-04-14 01:02:02,577 9d34c700 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/9d34c700/pkb.log>
2018-04-14 01:02:02,577 9d34c700 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/ws/runs/9d34c700/completion_statuses.json>
Build step 'Execute shell' marked build as failure